import json
import tornado.web
import tornado.gen
from common.sentry import sentry
from common.web import requestsManager
from constants import exceptions
from helpers import chatHelper
from objects import glob
class handler(requestsManager.asyncRequestHandler):
@tornado.web.asynchronous
@tornado.gen.engine
@sentry.captureTornado
def asyncGet(self):
statusCode = 400
data = {"message": "unknown error"}
try:
# Check arguments
if not requestsManager.checkArguments(self.request.arguments, ["k", "to", "msg"]):
raise exceptions.invalidArgumentsException()
# Check ci key
key = self.get_argument("k")
if key is None or key != glob.conf.config["server"]["cikey"]:
raise exceptions.invalidArgumentsException()
chatHelper.sendMessage(
"FokaBot",
self.get_argument("to").encode().decode("ASCII", "ignore"),
self.get_argument("msg").encode().decode("ASCII", "ignore")
)
# Status code and message
statusCode = 200
data["message"] = "ok"
except exceptions.invalidArgumentsException:
statusCode = 400
data["message"] = "invalid parameters"
finally:
# Add status code to data
data["status"] = statusCode
# Send response
self.write(json.dumps(data))
self.set_status(statusCode)
|
<?php
/** ______________________________________________
* o O | |
* ((((( o < JDom Class - Cook Self Service library |
* ( o o ) |______________________________________________|
* --------oOOO-----(_)-----OOOo---------------------------------- www.j-cook.pro --- +
* @version 2.5
* @package Cook Self Service
* @subpackage JDom
* @license GNU General Public License
* @author Jocelyn HUARD
*
* .oooO Oooo.
* ( ) ( )
* -------------\ (----) /----------------------------------------------------------- +
* \_) (_/
*/
// no direct access
defined( '_JEXEC' ) or die( 'Restricted access' );
class JDomHtmlFlyFileUrl extends JDomHtmlFlyFile
{
/*
 * Constructor
* @namespace : requested class
* @options : Configuration
* @dataKey : database field name
* @dataObject : complete object row (stdClass or Array)
* @dataValue : value default = dataObject->dataKey
* @indirect : Indirect File access
* @root : Default folder (alias : ex [DIR_TABLE_FIELD]) -> Need a parser (Cook helper)
* @width : Thumb width
* @height : Thumb height
* @preview : Preview type
* @href : Link on the file
* @target : Target of the link ('download', '_blank', 'modal', ...)
*
*
*/
function __construct($args)
{
parent::__construct($args);
}
function build()
{
$html = $this->href;
return $html;
}
}
|
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Immutable;
using System.Composition;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.Completion;
using Microsoft.CodeAnalysis.CSharp.AddImport;
using Microsoft.CodeAnalysis.CSharp.CodeFixes.GenerateMethod;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.SpellCheck;
namespace Microsoft.CodeAnalysis.CSharp.CodeFixes.Spellcheck
{
[ExportCodeFixProvider(LanguageNames.CSharp, Name = PredefinedCodeFixProviderNames.SpellCheck), Shared]
[ExtensionOrder(After = PredefinedCodeFixProviderNames.RemoveUnnecessaryCast)]
internal partial class CSharpSpellCheckCodeFixProvider : AbstractSpellCheckCodeFixProvider<SimpleNameSyntax>
{
private const string CS0426 = nameof(CS0426); // The type name '0' does not exist in the type '1'
public override ImmutableArray<string> FixableDiagnosticIds { get; } =
AddImportDiagnosticIds.FixableDiagnosticIds.Concat(
GenerateMethodDiagnosticIds.FixableDiagnosticIds).Concat(
ImmutableArray.Create(CS0426));
protected override bool ShouldSpellCheck(SimpleNameSyntax name)
=> !name.IsVar;
protected override bool DescendIntoChildren(SyntaxNode arg)
{
// Don't dive into type argument lists. We don't want to report spell checking
// fixes for type args when we're called on an outer generic type.
return !(arg is TypeArgumentListSyntax);
}
protected override bool IsGeneric(SimpleNameSyntax nameNode)
{
return nameNode is GenericNameSyntax;
}
protected override bool IsGeneric(CompletionItem completionItem)
{
return completionItem.DisplayText.Contains("<>");
}
protected override SyntaxToken CreateIdentifier(SimpleNameSyntax nameNode, string newName)
{
return SyntaxFactory.Identifier(newName).WithTriviaFrom(nameNode.Identifier);
}
}
}
|
using System.Reflection;
using System.Resources;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Windows;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("WpfOverlay")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("WpfOverlay")]
[assembly: AssemblyCopyright("Copyright © 2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
//In order to begin building localizable applications, set
//<UICulture>CultureYouAreCodingWith</UICulture> in your .csproj file
//inside a <PropertyGroup>. For example, if you are using US english
//in your source files, set the <UICulture> to en-US. Then uncomment
//the NeutralResourceLanguage attribute below. Update the "en-US" in
//the line below to match the UICulture setting in the project file.
//[assembly: NeutralResourcesLanguage("en-US", UltimateResourceFallbackLocation.Satellite)]
[assembly: ThemeInfo(
ResourceDictionaryLocation.None, //where theme specific resource dictionaries are located
//(used if a resource is not found in the page,
// or application resource dictionaries)
ResourceDictionaryLocation.SourceAssembly //where the generic resource dictionary is located
//(used if a resource is not found in the page,
// app, or any theme specific resource dictionaries)
)]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
|
import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing';
import { MessagesComponent } from './messages.component';
import { MessageService } from '../message.service';
describe('MessagesComponent', () => {
let component: MessagesComponent;
let fixture: ComponentFixture<MessagesComponent>;
let messageService: MessageService;
beforeEach(waitForAsync(() => {
TestBed.configureTestingModule({
declarations: [MessagesComponent],
providers: [MessageService]
}).compileComponents();
messageService = TestBed.inject(MessageService);
}));
beforeEach(() => {
fixture = TestBed.createComponent(MessagesComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should be created', () => {
expect(component).toBeTruthy();
});
});
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import os
import json
import collections
# See setTranslationData.py
with open(os.path.join(os.path.dirname(__file__), 'json/translationInfo.json'), 'r') as f:
d = json.JSONDecoder(object_pairs_hook = collections.OrderedDict)
translationInfo = d.decode(f.read())
def isValidTranslation(xmlFilename, translationLocale, translator):
if translationLocale in translationInfo:
if xmlFilename in translationInfo[translationLocale]['canon']:
for localeXmlTranslation in translationInfo[translationLocale]['canon'][xmlFilename]:
if translationInfo[translationLocale]['source'][ localeXmlTranslation['source'] ][0] == translator.decode('utf-8'):
return True
return False
def getTranslatorSource(xmlFilename, translationLocale, translator):
if xmlFilename in translationInfo[translationLocale]['canon']:
for localeXmlTranslation in translationInfo[translationLocale]['canon'][xmlFilename]:
if translationInfo[translationLocale]['source'][ localeXmlTranslation['source'] ][0] == translator.decode('utf-8'):
return localeXmlTranslation['source']
raise Exception('cannot find translator source %s %s %s' % (xmlFilename, translationLocale, translator))
def getTranslator(translationLocale, localeXmlTranslation):
return translationInfo[translationLocale]['source'][ localeXmlTranslation['source'] ][0]
def getLocaleXmlTranslations(translationLocale, xmlFilename):
localeXmlTranslations = []
for localeXmlTranslation in translationInfo[translationLocale]['canon'][xmlFilename]:
tmp = { 'source': localeXmlTranslation['source'],
'translator': getTranslator(translationLocale, localeXmlTranslation) }
# check if only partial translation is available
if 'excerpt' in localeXmlTranslation:
tmp['excerpt'] = localeXmlTranslation['excerpt']
localeXmlTranslations.append(tmp)
return localeXmlTranslations
def getI18nLinksTemplateValues(xmlFilename):
i18nLinksTmpValue = { 'localeTranslations': [] }
for translationLocale in translationInfo:
localeTranslation = { 'translationLocale': translationLocale }
if xmlFilename in translationInfo[translationLocale]['canon']:
localeTranslation['localeXmlTranslations'] = \
getLocaleXmlTranslations(translationLocale, xmlFilename)
if 'localeXmlTranslations' in localeTranslation:
i18nLinksTmpValue['localeTranslations'].append(localeTranslation)
if len(i18nLinksTmpValue['localeTranslations']) > 0:
i18nLinksTmpValue['xmlFilename'] = xmlFilename
return i18nLinksTmpValue
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='coinor.grumpy',
version='0.95.1',
description='Graphics for Understanding Mathematical Programming (GrUMPy)',
      long_description='''GrUMPy is a class for visualizing various algorithms used in solving discrete optimization problems. It has a class for dynamically generating and visualizing branch-and-bound trees that is derived from the GiMPy graph class. Using the branch-and-bound class, a user can visualize the branch-and-bound process in a number of different ways, either by building the tree dynamically through direct calls to Python from the solver or by piping the output of an instrumented solver to GrUMPy for parsing. The branch-and-bound class also includes a pure Python implementation of branch and bound that is targeted at educational use.
In addition, GrUMPy includes a class for visualizing 2-dimensional polyhedra that can be used in combination with a pure Python implementation of the Gomory cutting plane algorithm to geometrically visualize the process of solving an integer program by a cutting plane algorithm. In future releases, the cutting plane visualization will be joined together with the branch-and-bound implementation to yield a full-blown visualization of the branch-and-cut algorithm.
A quick introduction with examples is available on Github:
https://github.com/coin-or/GrUMPy
Documentation for the API is here:
https://tkralphs.github.io/GrUMPy
''',
author='Aykut Bulut, Ted Ralphs',
author_email='ted@lehigh.edu',
license='Eclipse Public License',
url='https://github.com/tkralphs/GrUMPy/',
namespace_packages=['coinor'],
packages=[pkg.replace('src','coinor') for pkg in find_packages()],
package_data={'':['*.vbc']},
include_package_data=True,
package_dir = {'coinor': 'src'},
install_requires=['coinor.gimpy>=2.1.0', 'pulp']
)
|
import * as avalon from 'avalon2';
import 'mmRouter';
import { menu as menuStore } from './stores';
import * as navConfig from './nav.config.js';
var bootbox = require('bootbox');
function getPage(component) {
const html = `<xmp is="${component}" :widget="{id:'${component.replace(/\-/g, '_')}'}"></xmp>`;
return html
}
function applyRouteConfig(config, parentRoute, accPath = '') {
config.map(function (route) {
let components:any = {};
if (route.component) {
components.currentPage = route.component;
}
        if (route.components) { // no corresponding property
components = route.components;
}
        // 2017-08-18: pass the i18n locale as a route parameter (/:locale)
avalon.router.add(accPath + route.path+'/:locale', function () {
Object.keys(components).map(viewName => {
let component = components[viewName];
if (typeof component === 'function') {
component(function (m) {
menuStore.selectedKeys$.onNext([m.name]);
avalon.vmodels[parentRoute.name][viewName] = getPage(m.name);
});
} else {
avalon.vmodels[parentRoute.name][viewName] = getPage(component.name);
}
});
});
        // TODO: support nested routes
//route.children && applyRouteConfig(route.children, route, accPath + route.path);
});
}
const routeConfig = [];
const locale = avalon.vmodels.root.locale || 'zh-CN';
// Recursively walk each menu item's children and populate routeConfig
const travel = item => {
if (!item.children || item.children.length === 0) {
routeConfig.push({
path: item.uri,
component: item.location
});
} else {
item.children.map(travel);
}
};
navConfig[locale].map(travel);
applyRouteConfig(routeConfig, {
name: 'root'
});
|
#! /usr/bin/env python
"""
This demonstrates a basic LEAP app that writes a string to a file.
Read the code, run the code (./run.py), then check leap/output for the output.
This app demonstrates:
* An App is a class that inherits from leap_app.App.
Being a child of leap_app.App automatically gives the app some useful features:
* self.settings automatically points to the settings object from settings.py
* self.create_output() automatically creates a directory in leap/output
with the app name and timestamp (e.g. 2013-05-09--13-26-19_introduction)
* settings.py is automatically copied to the output directory for future
reference
* leap/output is meant for scratch work, you usually keep it empty.
You run your app hundreds of times while developing, then every once
in a while clear out the directory when it gets annoying
* leap/long_term_output is where you copy the outputs you want to keep long term
* none of the contents of leap/output or leap/long_term_output are checked
into the repository
* self.out_path automatically points to the output directory that is created
by self.create_output()
* When you instantiate the app it will warn you if you violate PEP8 style guidelines
(try adding a space to the end of some line of code and running it again)
* self.profile() will call self.run() but with the profiler enabled
(try commenting app.run() and uncommenting app.profile())
* The app is a class that can be imported and instantiated by other apps, but if you
want to run the app directly you specify a __main__ block of code that instantiates
the app and runs it
"""
import os
from leap.lib.leap_app import leap_app
class IntroductionApp(leap_app.App):
def run(self):
self.create_output()
outfilename = os.path.join(self.out_path, "test.txt")
outfile = open(outfilename, "w")
print "writing \"%s\" to file" % self.settings.x
outfile.write(self.settings.x)
outfile.close()
if __name__ == "__main__":
app = IntroductionApp()
app.run()
#app.profile()
app.end()
|
/**
* Copyright (C) 2009-2012 Kenneth Prugh
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*/
package irc;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class IRCUtils {
/**
* @return user that sent this message
*
* @param message
* - The message to parse
*/
public static String getSender(String message) {
Pattern pattern = Pattern.compile(":(.*?)!");
Matcher matcher = pattern.matcher(message);
if (matcher.find()) {
return matcher.group(1);
} else {
return "";
}
}
/**
* @return user's ident that sent this message
*
* @param message
* - The message to parse
*/
public static String getIdent(String message) {
Pattern pattern = Pattern.compile("!~(.*?)@");
Matcher matcher = pattern.matcher(message);
if (matcher.find()) {
return matcher.group(1);
} else {
return "";
}
}
/**
* @return user's hostname that sent this message
*
* @param message
* - The message to parse
*/
public static String getHostname(String message) {
Pattern pattern = Pattern.compile("@(.*?) PRIVMSG");
Matcher matcher = pattern.matcher(message);
if (matcher.find()) {
return matcher.group(1);
} else {
return "";
}
}
/**
* @return - the channel (#foo)
*/
public static String getChannel(String message) {
Pattern pattern = Pattern.compile("PRIVMSG (.+?) :");
Matcher matcher = pattern.matcher(message);
if (matcher.find()) {
return matcher.group(1);
} else {
return "";
}
}
/**
* Return message contents from this message
*
* @param message
* - The message to parse
* @return - The message
*/
public static String getMessage(String message) {
Pattern pattern = Pattern.compile(":(.+? ):(.*)");
Matcher matcher = pattern.matcher(message);
if (matcher.find()) {
return matcher.group(2);
} else {
return "";
}
}
}
|
import os,re
import pandas as pd
#qtypes = ['which', 'what', 'why', 'when', 'where', 'how']
'''
Add binary columns (iswhich, iswhat, ..., ishow)
to indicate which type of question each row is.
'''
def parse_questions(ques_type, df):
df['is' + ques_type] = df.apply(lambda x : 1 if re.sub('[^a-zA-Z]+', ' ', x['question']).lower().split().count(ques_type) > 0 else 0, axis=1)
#print df['is' + ques_type].sum()
return df
'''
Change the 'All of the above' choice to the concatenation
of all the other answer choices.
'''
def diambiguate_answer(df):
ndf = df
for index, row in df.iterrows():
key = 'All of the above'
result = ''
if key in row.values.tolist() :
if row['answerA'] != key :
result += ' ' + row['answerA']
else :
flag = 0
if row['answerB'] != key :
result += ' ' + row['answerB']
else :
flag = 1
if row['answerC'] != key :
result += ' ' + row['answerC']
else:
flag =2
if row['answerD'] != key :
result += ' ' + row['answerD']
else :
flag = 3
if flag == 0: aid = 'answerA'
if flag == 1: aid = 'answerB'
if flag == 2: aid = 'answerC'
if flag == 3: aid = 'answerD'
            # change the item at this row index and the aid column
df.loc[index, aid] = result
print index
return df
# for ques_type in qtypes:
# df = parse_questions(ques_type, df)
def preprocess():
df = pd.DataFrame.from_csv('Data/validation_set.tsv', sep='\t', index_col=False)
df = diambiguate_answer(df)
df.to_csv('Data/newval.csv', index=False)
df = pd.DataFrame.from_csv('Data/training_set.tsv', sep='\t', index_col=False)
df = diambiguate_answer(df)
df.to_csv('Data/newtrain.csv', index=False)
preprocess()
|
/*
Copyright (C) 2012 - 2015 Evan Teran
evan.teran@gmail.com
Copyright (C) 1995-2003,2004,2005,2006,2007,2008,2009,2010,2011
Free Software Foundation, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef ELF_VERDEF_H_20121007_
#define ELF_VERDEF_H_20121007_
#include "elf_types.h"
/* Version definition sections. */
struct elf32_verdef {
elf32_half vd_version; /* Version revision */
elf32_half vd_flags; /* Version information */
elf32_half vd_ndx; /* Version Index */
elf32_half vd_cnt; /* Number of associated aux entries */
elf32_word vd_hash; /* Version name hash value */
elf32_word vd_aux; /* Offset in bytes to verdaux array */
elf32_word vd_next; /* Offset in bytes to next verdef entry */
};
struct elf64_verdef {
elf64_half vd_version; /* Version revision */
elf64_half vd_flags; /* Version information */
elf64_half vd_ndx; /* Version Index */
elf64_half vd_cnt; /* Number of associated aux entries */
elf64_word vd_hash; /* Version name hash value */
elf64_word vd_aux; /* Offset in bytes to verdaux array */
elf64_word vd_next; /* Offset in bytes to next verdef entry */
};
/* Legal values for vd_version (version revision). */
enum {
VER_DEF_NONE = 0, /* No version */
VER_DEF_CURRENT = 1, /* Current version */
VER_DEF_NUM = 2 /* Given version number */
};
/* Legal values for vd_flags (version information flags). */
enum {
VER_FLG_BASE = 0x1, /* Version definition of file itself */
VER_FLG_WEAK = 0x2 /* Weak version identifier */
};
/* Versym symbol index values. */
enum {
VER_NDX_LOCAL = 0, /* Symbol is local. */
VER_NDX_GLOBAL = 1, /* Symbol is global. */
VER_NDX_LORESERVE = 0xff00, /* Beginning of reserved entries. */
VER_NDX_ELIMINATE = 0xff01 /* Symbol is to be eliminated. */
};
#endif
|
#!/usr/bin/env python
"""
ConfigFiles.py $Id: ConfigFiles.py,v 1.9 2002/05/18 10:28:24 nordstrom Exp $
Defines easy access to the various .ini config files for PyPlucker.
Copyright 2000 by Holger Duerer <holly@starship.python.net>
Distributable under the GNU General Public License Version 2 or newer.
"""
import os, sys, types, string, ConfigParser
import PyPlucker
SYS_CONFIG_FILE = os.path.join (PyPlucker.lib_dir, PyPlucker._SYS_CONFIGFILE_NAME)
if os.environ.has_key ('HOME'):
USER_CONFIG_FILE = os.path.join (os.environ["HOME"], PyPlucker._USER_CONFIGFILE_NAME)
else:
# should not happen, but what's it like on those lesser operating systems?
USER_CONFIG_FILE = SYS_CONFIG_FILE
class _ConfigGetter:
"""A helper class to maintain one section in one config file"""
def __init__ (self, config, section):
self._config = config
self._section = section
def get_string (self, option):
try:
return self._config.get (self._section, option, raw=1)
except:
return None
def get_int (self, option):
return int (self.get_string (option))
class Configuration:
    """A class to maintain information about all possible
    user-settable options from various .ini config files."""
def __init__ (self, pluckerhome, pluckerdir, extra_sections=[], error_logger=None):
"""Load .ini files from all possible places and present one
unified view"""
self._configs = []
if sys.platform == 'win32':
self._sections = ['WINDOWS']
elif sys.platform == 'os2':
self._sections = ['POSIX', 'OS2']
else:
self._sections = ['POSIX']
self._sections = self._sections + extra_sections
self._dict = {}
self.maybe_load_config (SYS_CONFIG_FILE, error_logger)
self.maybe_load_config (USER_CONFIG_FILE, error_logger)
if pluckerhome:
self.maybe_load_config (os.path.join (pluckerhome, PyPlucker._USER_CONFIGFILE_NAME),
error_logger)
if pluckerdir:
self.maybe_load_config (os.path.join (pluckerdir, PyPlucker._USER_CONFIGFILE_NAME),
error_logger)
if pluckerhome:
self.set ('PLUCKERHOME', pluckerhome)
if pluckerdir:
self.set ('pluckerdir', pluckerdir)
def maybe_load_config (self, filename, error_logger):
"""Load all sections from config file 'filename'"""
if os.path.exists (filename):
            # reverse the list, so that appends become prepends
self._configs.reverse ()
try:
c = ConfigParser.ConfigParser ()
c.read (filename)
for section in self._sections:
if c.has_section (section):
self._configs.append (_ConfigGetter (c, section))
except ConfigParser.Error, text:
if error_logger is not None:
error_logger ("Error parsing config file '%s': %s" % (filename, text))
pass
# reverse again...
self._configs.reverse ()
def _get_string (self, option):
if self._dict.has_key (option):
return self._dict[option]
aList = map (lambda x, o=option: x.get_string (o), self._configs)
result = reduce (lambda a, b: a or b, aList, None)
return result
## these should probably be re-written so that they always either raise
## an exception, or return a value of the specified type. In other words,
## re-written so that they never return None. -- wcj
def get_string (self, option, default=None):
if not (default is None):
assert (type(default) == types.StringType)
result = self._get_string(option)
if result is None:
return default
else:
return result
def get_int (self, option, default=None):
if not (default is None):
assert (type(default) == types.IntType or type(default) == types.LongType)
if self._dict.has_key (option):
return int (self._dict[option])
result = self._get_string (option)
if result is None:
return int (default)
else:
return int (result)
def get_bool (self, option, default=None):
res = self._get_string (option)
if res is None:
if default is None:
return None
else:
res = default
if type (res) == types.StringType:
res = string.lower (res)
if res == 1 or res == "1" or res == "y" or res == "yes" or res == "true" or res == "on":
return 1
if res == 0 or res == "0" or res == "n" or res == "no" or res == "false" or res == "off":
return 0
else:
raise RuntimeError("Illegal non-boolean value '%s' found for option '%s'" % (repr (res), option))
def set (self, option, value):
self._dict[option] = value
|
/****************************************************************************
*
* Copyright (C) 2012-2019 PX4 Development Team. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. Neither the name PX4 nor the names of its contributors may be
* used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
****************************************************************************/
/**
* @file test_sleep.c
* Tests the px4_usleep() method.
*/
#include <px4_time.h>
#include <px4_config.h>
#include <px4_defines.h>
#include <sys/types.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <fcntl.h>
#include <errno.h>
#include <arch/board/board.h>
#include "tests_main.h"
int test_sleep(int argc, char *argv[])
{
unsigned int nsleeps = 20;
printf("\t %d 100ms sleeps\n", nsleeps);
fflush(stdout);
for (unsigned int i = 0; i < nsleeps; i++) {
px4_usleep(100000);
}
printf("\t Sleep test successful.\n");
return OK;
}
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BLEU_TURQUIN = "rgb(66, 91, 138)";
exports.BLANC = "rgb(255, 255, 255)";
exports.NOIR = "rgb(0,0,0)";
exports.FOND_NOIR = "rgb(17, 17, 17)";
exports.GRIS_NOIR = "rgb(50, 50, 50)";
exports.ROUGE = "rgb(255, 0, 0)";
exports.JAUNE = "rgb(225, 225, 0)";
exports.VIOLET = "rgb(128, 0, 128)";
exports.BLEU_CANARD = "rgb(0, 128, 128)";
exports.BLEU_CIEL = "rgb(119, 181, 254)";
exports.VERT = "rgb(0, 225, 0)";
exports.GRIS = "rgb(135, 135, 135)";
exports.GRIS_CLAIR = "rgb(229, 229, 229)";
exports.TOUS = exports.GRIS;
exports.FOND = exports.GRIS_CLAIR;
exports.COULEUR_SEPARATION = exports.BLEU_CIEL;
exports.OMBRE_RECEPTION = exports.TOUS;
exports.OMBRE_EMISSION = exports.ROUGE;
//# sourceMappingURL=couleur.js.map
|
# Standard lib imports
# None
# Third party imports
# None
# Project level imports
# None
class ProtectionDomain(object):
def __init__(self, connection):
"""
Initialize a new instance
"""
self.conn = connection
@property
def get(self):
"""
        Returns a `list` of all the `System` objects known to the cluster. Updates every time - no caching.
:return: a `list` of all the `System` objects known to the cluster.
:rtype: list
"""
self.conn.connection._check_login()
response = self.conn.connection._do_get("{}/{}".format(self.conn.connection._api_url, "types/System/instances")).json()
all_system_objects = []
for system_object in response:
all_system_objects.append(self.conn.System.from_dict(system_object))
return all_system_objects
def get_pd_by_name(self, name):
"""
Get ScaleIO ProtectionDomain object by its name
:param name: Name of ProtectionDomain
:return: ScaleIO ProtectionDomain object
        :raise KeyError: No ProtectionDomain with specified name found
:rtype: ProtectionDomain object
"""
for pd in self.conn.protection_domains:
if pd.name == name:
return pd
raise KeyError("Protection Domain NAME " + name + " not found")
def get_pd_by_id(self, id):
"""
Get ScaleIO ProtectionDomain object by its id
        :param id: ID of ProtectionDomain
        :return: ScaleIO ProtectionDomain object
        :raise KeyError: No ProtectionDomain with specified ID found
:rtype: ProtectionDomain object
"""
for pd in self.conn.protection_domains:
if pd.id == id:
return pd
raise KeyError("Protection Domain with ID " + id + " not found")
def create_protection_domain(self, pdObj, **kwargs):
# TODO:
# Check if object parameters are the correct ones
self.conn.connection._check_login()
        response = self.conn.connection._do_post("{}/{}".format(self.conn.connection._api_url, "types/Volume/instances"), json=pdObj.__to_dict__())
return response
def delete_potection_domain(self, pdObj):
"""
        :param pdObj: ProtectionDomain object to delete
        type: POST
        :return: POST request response
"""
self.conn.connection._check_login()
response = self.conn.connection._do_post("{}/{}{}/{}".format(self.conn.connection._api_url, "instances/ProtectionDomain::", pdObj.id, 'action/removeProtectionDomain'))
return response
|
#!/usr/bin/env python
# --!-- coding: utf8 --!--
# The loadSave module calls the proper functions to load and save a project,
# trying to detect the proper file format in case it comes from an older version.
import os
import zipfile
import manuskript.load_save.version_0 as v0
import manuskript.load_save.version_1 as v1
import logging
LOGGER = logging.getLogger(__name__)
def saveProject(version=None):
# While debugging, we don't save the project
# return
if version == 0:
return v0.saveProject()
else:
return v1.saveProject()
def clearSaveCache():
v1.cache = {}
def loadProject(project):
# Detect version
isZip = False
version = 0
# Is it a zip?
try:
zf = zipfile.ZipFile(project)
isZip = True
except zipfile.BadZipFile:
isZip = False
# Does it have a VERSION in zip root?
# Was used in transition between 0.2.0 and 0.3.0
# So VERSION part can be deleted for manuskript 0.4.0
if isZip and "VERSION" in zf.namelist():
version = int(zf.read("VERSION"))
# Does it have a MANUSKRIPT in zip root?
elif isZip and "MANUSKRIPT" in zf.namelist():
version = int(zf.read("MANUSKRIPT"))
# Zip but no VERSION/MANUSKRIPT: oldest file format
elif isZip:
version = 0
# Not a zip
else:
with open(project, "r", encoding="utf-8") as f:
version = int(f.read())
LOGGER.info("Loading: %s", project)
LOGGER.info("Detected file format version: {}. Zip: {}.".format(version, isZip))
if version == 0:
v0.loadProject(project)
else:
v1.loadProject(project, zip=isZip)
|
/**
* Copyright (c) Andrew Swan 2008
*/
package com.andrewswan.lostcities.domain;
import junit.framework.TestCase;
/**
* Unit test of the {@link Expedition} class.
*
* @author Andrew
*/
public class ExpeditionTest extends TestCase {
// Constants
private static final Suit SUIT = Suit.GREEN; // arbitrary
// Fixture
private Expedition expedition;
@Override
protected void setUp() throws Exception {
super.setUp();
expedition = new Expedition(SUIT);
}
public void testEmptyExpedition() {
assertEquals(0, expedition.getValue());
assertEquals(0, expedition.size());
assertTrue(expedition.isEmpty());
assertNull(expedition.getTopCard());
for (final FaceValue value : FaceValue.values()) {
assertTrue(expedition.canAdd(value));
}
}
public void testGetPotentialValueWithTenAdded() {
// Set up
expedition.add(FaceValue.TEN);
final int startValue = expedition.getValue();
// Invoke
final int potentialValue = expedition.getPotentialValue(new Expedition(SUIT));
// Check
assertEquals(startValue, potentialValue);
}
}
|
from struct import pack, unpack
"""
This module contains functions for reading and writing the special data types
that a midi file contains.
"""
"""
Nibbles are four bits; a byte consists of two nibbles
(hiBits==0xF0, loBits==0x0F). Especially used for setting the
channel and event type in the first byte of musical midi events.
"""
def getNibbles(byte):
"""
Returns hi and lo bits in a byte as a tuple
>>> getNibbles(142)
(8, 14)
Asserts byte value in byte range
>>> getNibbles(256)
Traceback (most recent call last):
...
ValueError: Byte value out of range 0-255: 256
"""
if not 0 <= byte <= 255:
raise ValueError('Byte value out of range 0-255: %s' % byte)
return (byte >> 4 & 0xF, byte & 0xF)
def setNibbles(hiNibble, loNibble):
"""
Returns byte with value set according to hi and lo bits
Asserts hiNibble and loNibble in range(16)
>>> setNibbles(8, 14)
142
>>> setNibbles(8, 16)
Traceback (most recent call last):
...
    ValueError: Nibble value out of range 0-15: (8, 16)
"""
if not (0 <= hiNibble <= 15) or not (0 <= loNibble <= 15):
        raise ValueError('Nibble value out of range 0-15: (%s, %s)' % (hiNibble, loNibble))
return (hiNibble << 4) + loNibble
def readBew(value):
return unpack('>%s' % {1:'B', 2:'H', 4:'L'}[len(value)], value)[0]
def writeBew(value, length):
"""
Write int as big endian formatted string, (asserts length in [1,2,4])
Difficult to print the result in doctest, so I do a simple roundabout test.
>>> readBew(writeBew(25057, 2))
25057
>>> readBew(writeBew(1642193635L, 4))
1642193635L
"""
return pack('>%s' % {1:'B', 2:'H', 4:'L'}[length], value)
"""
Variable Length Data (varlen) is a data format sprayed liberally throughout
a midi file. It can be anywhere from 1 to 4 bytes long.
If the 8th bit is set in a byte, another byte follows. The value is stored
in the lowest 7 bits of each byte. So max value is 4x7 bits = 28 bits.
"""
def readVar(value):
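    """
    Converts a varlength byte string back to an integer.
    A quick round-trip check of the 7-bits-per-byte scheme described above
    (assuming Python 2 byte strings, as used throughout this module):
    >>> readVar(writeVar(192))
    192
    """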
sum = 0
for byte in unpack('%sB' % len(value), value):
sum = (sum << 7) + (byte & 0x7F)
if not 0x80 & byte: break # stop after last byte
return sum
def varLen(value):
"""
    Returns the number of bytes an integer will occupy when
    converted to varlength format
"""
if value <= 127:
return 1
elif value <= 16383:
return 2
elif value <= 2097151:
return 3
else:
return 4
def writeVar(value):
"Converts an integer to varlength format"
sevens = to_n_bits(value, varLen(value))
for i in range(len(sevens)-1):
sevens[i] = sevens[i] | 0x80
return fromBytes(sevens)
def to_n_bits(value, length=1, nbits=7):
    "returns the integer value as a sequence of 'length' bytes with nbits bits each"
bytes = [(value >> (i*nbits)) & 0x7F for i in range(length)]
bytes.reverse()
return bytes
def toBytes(value):
"Turns a string into a list of byte values"
return unpack('%sB' % len(value), value)
def fromBytes(value):
"Turns a list of bytes into a string"
if not value:
return ''
return pack('%sB' % len(value), *value)
|
<?php
$currentAppLang = Yii::app()->getLanguage();
if (!$this->positionHasData(1, "sideColumn")) {
$news = new ArticlesListData(array('news'), 0, 6);
$news->addColumn('publish_date');
$news->addOrder('publish_date desc');
$news->generate();
$newsList = $news->getItems();
$this->setPositionData(1, $this->widget('widgets.NewsSideList', array('items' => $newsList, "title" => AmcWm::t("amcFront", 'News Center')), true), "sideColumn");
}
if (!$this->positionHasData(2, "sideColumn")) {
$newsletterWidget = AmcWm::app()->executeWidget(
"amcwm.modules.maillist.frontend.components.ExecuteSubscribe",
array('widget' => "ext.NewsletterWidget"),
array('id' => 'newsletter_frm', 'title'=>AmcWm::t("app", 'Subscribe in Newsletter')),
true
);
$this->setPositionData(2, $newsletterWidget, "sideColumn");
}
|
from django.conf.urls import url, include
from rest_framework.urlpatterns import format_suffix_patterns
from rest_framework_jwt.views import obtain_jwt_token, refresh_jwt_token
from api import views
urlpatterns = format_suffix_patterns([
# swagger documentation url
url(r'^docs/', include('rest_framework_swagger.urls')),
# login and register url
url(r'^auth/register$',
views.UserList.as_view(),
name='user-register'),
# url(r'^auth/login/',
# authtoken_views.obtain_auth_token,
# name='user-login'),
url(r'^auth/login', obtain_jwt_token, name='user-login'),
url(r'^auth/api-token-refresh/', refresh_jwt_token),
url(r'^users/(?P<pk>[0-9]+)/$',
views.UserDetail.as_view(),
name='user-detail'),
# bucketlist related urls
url(r'^bucketlists/$',
views.BucketListAll.as_view(),
name='bucketlist-list'),
url(r'^bucketlists/(?P<pk>[0-9]+)$',
views.BucketListDetail.as_view(),
name='bucketlist-detail'),
# bucketlist item related urls
url(r'^bucketlists/(?P<bucketlist>[0-9]+)/items/$',
views.BucketListItemAll.as_view(),
name='item-list'),
url(r'^bucketlists/(?P<bucketlist>[0-9]+)/items/'
'(?P<pk>[0-9]+)$', views.BucketListItemDetail.as_view(),
name='item-detail'),
])
|
# coding: utf-8
"""
MailMojo API
v1 of the MailMojo API # noqa: E501
OpenAPI spec version: 1.1.0
Contact: hjelp@mailmojo.no
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import mailmojo_sdk
from mailmojo_sdk.api.account_api import AccountApi # noqa: E501
from mailmojo_sdk.rest import ApiException
class TestAccountApi(unittest.TestCase):
"""AccountApi unit test stubs"""
def setUp(self):
self.api = mailmojo_sdk.api.account_api.AccountApi() # noqa: E501
def tearDown(self):
pass
def test_create_account(self):
"""Test case for create_account
Create an account. # noqa: E501
"""
pass
def test_get_account_by_username(self):
"""Test case for get_account_by_username
Retrieve account details. # noqa: E501
"""
pass
def test_get_domain(self):
"""Test case for get_domain
Retrieve domain details and authentication status. # noqa: E501
"""
pass
def test_update_account(self):
"""Test case for update_account
Update account details. # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
|
<?php
/**
* The contents of this file was generated using the WSDLs as provided by eBay.
*
* DO NOT EDIT THIS FILE!
*/
namespace DTS\eBaySDK\Trading\Types;
/**
*
*/
class BidGroupArrayType extends \DTS\eBaySDK\Types\BaseType
{
/**
* @var array Properties belonging to objects of this class.
*/
private static $propertyTypes = [
];
/**
* @param array $values Optional properties and values to assign to the object.
*/
public function __construct(array $values = [])
{
list($parentValues, $childValues) = self::getParentValues(self::$propertyTypes, $values);
parent::__construct($parentValues);
if (!array_key_exists(__CLASS__, self::$properties)) {
self::$properties[__CLASS__] = array_merge(self::$properties[get_parent_class()], self::$propertyTypes);
}
if (!array_key_exists(__CLASS__, self::$xmlNamespaces)) {
self::$xmlNamespaces[__CLASS__] = 'xmlns="urn:ebay:apis:eBLBaseComponents"';
}
$this->setValues(__CLASS__, $childValues);
}
}
|
/*******************************************************************************
* Copyright (c) 2011, 2012 Oracle and/or its affiliates. All rights reserved.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
* which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* Denise Smith - 2.4
******************************************************************************/
package org.eclipse.persistence.testing.jaxb.json.namespaces;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.persistence.jaxb.JAXBContextProperties;
import org.eclipse.persistence.testing.jaxb.json.JSONMarshalUnmarshalTestCases;
public class NamespacesOnContextTestCases extends JSONMarshalUnmarshalTestCases{
private final static String JSON_RESOURCE = "org/eclipse/persistence/testing/jaxb/json/namespaces/person.json";
public NamespacesOnContextTestCases(String name) throws Exception {
super(name);
setControlJSON(JSON_RESOURCE);
setClasses(new Class[]{Person.class});
}
protected Object getControlObject() {
Person p = new Person();
p.setId(10);
p.setFirstName("Jill");
p.setLastName("MacDonald");
List<String> middleNames = new ArrayList<String>();
middleNames.add("Jane");
middleNames.add("Janice");
p.setMiddleNames(middleNames);
Address addr = new Address();
addr.setStreet("The Street");
addr.setCity("Ottawa");
p.setAddress(addr);
return p;
}
public Map getProperties(){
Map props = new HashMap();
props.put(JAXBContextProperties.JSON_ATTRIBUTE_PREFIX, "@");
Map<String, String> namespaceMap = new HashMap<String, String>();
namespaceMap.put("namespace0", "ns0");
namespaceMap.put("namespace1", "ns1");
namespaceMap.put("namespace2", "ns2");
namespaceMap.put("namespace3", "ns3");
props.put(JAXBContextProperties.NAMESPACE_PREFIX_MAPPER, namespaceMap);
return props;
}
}
|
"""
Copyright (C) <2010> Autin L.
This file ePMV_git/demo/renderMaya.py is part of ePMV.
ePMV is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ePMV is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with ePMV. If not, see <http://www.gnu.org/licenses/gpl-3.0.html>.
"""
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 8 11:48:14 2011
@author: -
"""
from maya import mel
from maya import cmds
startFrame = cmds.getAttr("defaultRenderGlobals.startFrame")
endFrame = cmds.getAttr("defaultRenderGlobals.endFrame")
gMainProgressBar = mel.eval('$tmp = $gMainProgressBar')  # use the mel module imported above
cmds.progressBar( gMainProgressBar,
edit=True,
beginProgress=True,
isInterruptable=True,
status="Rendering...",
maxValue=endFrame)
for i in range(int(startFrame), int(endFrame) + 1):  # frame attrs are floats; the render range is inclusive
if cmds.progressBar(gMainProgressBar, query=True, isCancelled=True ) :
break
cmds.currentTime( i )
cmds.render()
cmds.progressBar(gMainProgressBar, edit=True, step=1)
cmds.progressBar(gMainProgressBar, edit=True, endProgress=True)
print("Completed rendering of " + str(int(endFrame - startFrame) + 1) + " frames.\n")
|
sofia.resources['jw'] = {
"translation": {
"name": "Javanese",
"menu": {
"search": {
"placeholder": "Panelusuran"
},
"config": {
"font": "Font",
"settings": "Setelan",
"tools": "Pribadi"
},
"reset": "ngreset",
"themes": {
"default": "Normal",
"sepia": "Sepia",
"dark": "Kurang cahya"
},
"labels": {
"about": "About",
"addwindow": "Tambah Window",
"settings": "Setelan",
"feedback": "Komentar",
"options": "Pilihan"
},
"feedback": {
"name": "Jeneng",
"email": "Email",
"comments": "Komentar Panjenengan",
"feature": "Fitur Suggestion",
"bug": "Bug Report",
"other": "Liyane",
"send": "Kirimi",
"thankyou": "Matur nuwun kanggo saran"
}
},
"plugins": {
"visualfilters": {
"button": "Visual Filter-filter",
"title": "Visual Filter-filter",
"newfilter": "Nyaring anyar",
"strongsnumber": "Kuwat iku #",
"morphology": "Morfologi",
"style": "Gaya"
},
"eng2p": {
"button": "English 2 Person jamak",
"title": "English kapindho Person jamak"
},
"lemmapopup": {
"findalloccurrences": "Nemokake kabeh manéka (kira-kira __count__)"
}
},
"windows": {
"bible": {
"label": "Kitab Suci",
"filter": "Nyaring ...",
"ot": "Prajanjian Lawas",
"nt": "Prajanjian Anyar",
"dc": "Deuterocanonical Books",
"more": "More",
"less": "Kurang",
"recentlyused": "Bubar digunakake",
"languages": "Basa",
"countries": "Negara"
},
"commentary": {
"label": "Katrangan"
},
"map": {
"label": "Maps",
"placeholder": "Panelusuran ..."
},
"search": {
"label": "Panelusuran",
"placeholder": "Panelusuran",
"button": "Panelusuran",
"results": "Asil",
"verses": "ayat",
"options": "Search Pilihan"
},
"media": {
"label": "Media"
},
"notes": {
"label": "Cathetan"
},
"audio": {
"options": "Audio Pilihan",
"synctext": "Tèks sink (beta)",
"autoplay": "Autoplay Sabanjure",
"drama": "Drama",
"nondrama": "Non-Drama"
},
"parallel": {
"label": "Paralel",
"loading": "Loading ...",
"showall": "Tampilake Kabeh",
"hideall": "Singidaken Kabeh"
},
"comparison": {
"label": "Comparison"
}
},
"names": {
"en": "Javanese"
}
}
}
|
"""
Example of a Beta distribution
------------------------------
Figure 3.17.
This shows an example of a beta distribution with various parameters.
We'll generate the distribution using::
dist = scipy.stats.beta(...)
Where ... should be filled in with the desired distribution parameters
Once we have defined the distribution parameters in this way, these
distribution objects have many useful methods; for example:
* ``dist.pmf(x)`` computes the Probability Mass Function at values ``x``
in the case of discrete distributions
* ``dist.pdf(x)`` computes the Probability Density Function at values ``x``
in the case of continuous distributions
* ``dist.rvs(N)`` computes ``N`` random variables distributed according
to the given distribution
Many further options exist; refer to the documentation of ``scipy.stats``
for more details.
"""
# Author: Jake VanderPlas
# License: BSD
# The figure produced by this code is published in the textbook
# "Statistics, Data Mining, and Machine Learning in Astronomy" (2013)
# For more information, see http://astroML.github.com
# To report a bug or issue, use the following forum:
# https://groups.google.com/forum/#!forum/astroml-general
import numpy as np
from scipy.stats import beta
from matplotlib import pyplot as plt
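#----------------------------------------------------------------------
# A minimal sketch of the frozen-distribution API described in the docstring
# above (the parameter values here are arbitrary): pdf() evaluates the
# density and rvs() draws random variates from the same object.
_demo_dist = beta(0.5, 1.5)           # freeze a Beta(alpha=0.5, beta=1.5)
_demo_density = _demo_dist.pdf(0.5)   # density evaluated at x = 0.5
_demo_samples = _demo_dist.rvs(10)    # draw 10 random samples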
#----------------------------------------------------------------------
# This function adjusts matplotlib settings for a uniform feel in the textbook.
# Note that with usetex=True, fonts are rendered with LaTeX. This may
# result in an error if LaTeX is not installed on your system. In that case,
# you can set usetex to False.
from astroML.plotting import setup_text_plots
setup_text_plots(fontsize=8, usetex=True)
#------------------------------------------------------------
# Define the distribution parameters to be plotted
alpha_values = [0.5, 1.5, 3.0, 0.5]
beta_values = [0.5, 1.5, 3.0, 1.5]
linestyles = ['-', '--', ':', '-.']
x = np.linspace(0, 1, 1002)[1:-1]
#------------------------------------------------------------
# plot the distributions
fig, ax = plt.subplots(figsize=(5, 3.75))
for a, b, ls in zip(alpha_values, beta_values, linestyles):
dist = beta(a, b)
plt.plot(x, dist.pdf(x), ls=ls, c='black',
label=r'$\alpha=%.1f,\ \beta=%.1f$' % (a, b))
plt.xlim(0, 1)
plt.ylim(0, 3)
plt.xlabel('$x$')
plt.ylabel(r'$p(x|\alpha,\beta)$')
plt.title('Beta Distribution')
plt.legend(loc=0)
plt.show()
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Logging handler for Google Container Engine (GKE).
Formats log messages in a JSON format, so that Kubernetes clusters with the
fluentd Google Cloud plugin installed can format their log messages so that
metadata such as log level is properly captured.
"""
import logging.handlers
from google.cloud.logging.handlers._helpers import format_stackdriver_json
class ContainerEngineHandler(logging.StreamHandler):
    """Handler to format log messages into the format expected by GKE fluentd.
This handler is written to format messages for the Google Container Engine
(GKE) fluentd plugin, so that metadata such as log level are properly set.
"""
def format(self, record):
"""Format the message into JSON expected by fluentd.
:type record: :class:`~logging.LogRecord`
:param record: the log record
:rtype: str
:returns: A JSON string formatted for GKE fluentd.
"""
message = super(ContainerEngineHandler, self).format(record)
return format_stackdriver_json(record, message)
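if __name__ == "__main__":
    # Minimal usage sketch, assuming log lines should go to stdout; the
    # logger wiring below is only illustrative.
    import logging
    import sys
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.INFO)
    root_logger.addHandler(ContainerEngineHandler(stream=sys.stdout))
    root_logger.info("structured log line for the GKE fluentd plugin")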
|
import sys
import time
import telepot
import telepot.namedtuple
from telepot.loop import MessageLoop
"""
$ python2.7 emodi.py <token>
Emodi: An Emoji Unicode Decoder - You send it some emoji, it tells you the unicodes.
Caution: Python's treatment of unicode characters longer than 2 bytes (which
most emojis are) varies across versions and platforms. I have tested this program
on Python2.7.9/Raspbian. If you try it on other versions/platforms, the length-
checking and substring-extraction below may not work as expected.
"""
def handle(msg):
content_type, chat_type, chat_id = telepot.glance(msg)
m = telepot.namedtuple.Message(**msg)
if chat_id < 0:
# group message
print 'Received a %s from %s, by %s' % (content_type, m.chat, m.from_)
else:
# private message
print 'Received a %s from %s' % (content_type, m.chat) # m.chat == m.from_
if content_type == 'text':
reply = ''
# For long messages, only return the first 10 characters.
if len(msg['text']) > 10:
reply = u'First 10 characters:\n'
# Length-checking and substring-extraction may work differently
# depending on Python versions and platforms. See above.
reply += msg['text'][:10].encode('unicode-escape').decode('ascii')
bot.sendMessage(chat_id, reply)
TOKEN = sys.argv[1] # get token from command-line
bot = telepot.Bot(TOKEN)
MessageLoop(bot, handle).run_as_thread()
print 'Listening ...'
# Keep the program running.
while 1:
time.sleep(10)
|
#include "NativeFeatureIncludes.h"
#if _RAKNET_SUPPORT_NatTypeDetectionClient==1
#include "NatTypeDetectionClient.h"
#include "RakNetSocket.h"
#include "RakNetSmartPtr.h"
#include "BitStream.h"
#include "SocketIncludes.h"
#include "RakString.h"
#include "RakPeerInterface.h"
#include "MessageIdentifiers.h"
#include "SocketLayer.h"
using namespace RakNet;
STATIC_FACTORY_DEFINITIONS(NatTypeDetectionClient,NatTypeDetectionClient);
NatTypeDetectionClient::NatTypeDetectionClient()
{
c2=INVALID_SOCKET;
}
NatTypeDetectionClient::~NatTypeDetectionClient()
{
if (c2!=INVALID_SOCKET)
{
closesocket(c2);
}
}
void NatTypeDetectionClient::DetectNATType(SystemAddress _serverAddress)
{
if (IsInProgress())
return;
if (c2==INVALID_SOCKET)
{
DataStructures::List<RakNetSmartPtr<RakNetSocket> > sockets;
rakPeerInterface->GetSockets(sockets);
SystemAddress sockAddr = SocketLayer::GetSystemAddress(sockets[0]->s);
char str[64];
sockAddr.ToString(false,str);
c2=CreateNonblockingBoundSocket(str);
c2Port=SocketLayer::Instance()->GetLocalPort(c2);
}
serverAddress=_serverAddress;
RakNet::BitStream bs;
bs.Write((unsigned char)ID_NAT_TYPE_DETECTION_REQUEST);
bs.Write(true); // IsRequest
bs.Write(c2Port);
rakPeerInterface->Send(&bs,MEDIUM_PRIORITY,RELIABLE,0,serverAddress,false);
}
void NatTypeDetectionClient::OnCompletion(NATTypeDetectionResult result)
{
Packet *p = rakPeerInterface->AllocatePacket(sizeof(MessageID)+sizeof(unsigned char)*2);
printf("Returning nat detection result to the user\n");
p->data[0]=ID_NAT_TYPE_DETECTION_RESULT;
p->systemAddress=serverAddress;
p->systemAddress.systemIndex=(SystemIndex)-1;
p->guid=rakPeerInterface->GetGuidFromSystemAddress(serverAddress);
p->data[1]=(unsigned char) result;
rakPeerInterface->PushBackPacket(p, true);
// Symmetric and port restricted are determined by server, so no need to notify server we are done
if (result!=NAT_TYPE_PORT_RESTRICTED && result!=NAT_TYPE_SYMMETRIC)
{
// Otherwise tell the server we got this message, so it stops sending tests to us
RakNet::BitStream bs;
bs.Write((unsigned char)ID_NAT_TYPE_DETECTION_REQUEST);
bs.Write(false); // Done
rakPeerInterface->Send(&bs,HIGH_PRIORITY,RELIABLE,0,serverAddress,false);
}
Shutdown();
}
bool NatTypeDetectionClient::IsInProgress(void) const
{
return serverAddress!=UNASSIGNED_SYSTEM_ADDRESS;
}
void NatTypeDetectionClient::Update(void)
{
if (IsInProgress())
{
char data[ MAXIMUM_MTU_SIZE ];
int len;
SystemAddress sender;
len=NatTypeRecvFrom(data, c2, sender);
if (len==1 && data[0]==NAT_TYPE_NONE && sender==serverAddress)
{
OnCompletion(NAT_TYPE_NONE);
RakAssert(IsInProgress()==false);
}
}
}
PluginReceiveResult NatTypeDetectionClient::OnReceive(Packet *packet)
{
if (IsInProgress() && packet->systemAddress==serverAddress)
{
switch (packet->data[0])
{
case ID_OUT_OF_BAND_INTERNAL:
{
if (packet->length>=3 && packet->data[1]==ID_NAT_TYPE_DETECT)
{
OnCompletion((NATTypeDetectionResult)packet->data[2]);
return RR_STOP_PROCESSING_AND_DEALLOCATE;
}
}
break;
case ID_NAT_TYPE_DETECTION_RESULT:
OnCompletion((NATTypeDetectionResult)packet->data[1]);
return RR_STOP_PROCESSING_AND_DEALLOCATE;
case ID_NAT_TYPE_DETECTION_REQUEST:
OnTestPortRestricted(packet);
return RR_STOP_PROCESSING_AND_DEALLOCATE;
}
}
return RR_CONTINUE_PROCESSING;
}
void NatTypeDetectionClient::OnClosedConnection(SystemAddress systemAddress, RakNetGUID rakNetGUID, PI2_LostConnectionReason lostConnectionReason )
{
(void) lostConnectionReason;
(void) rakNetGUID;
if (IsInProgress() && systemAddress==serverAddress)
Shutdown();
}
void NatTypeDetectionClient::OnTestPortRestricted(Packet *packet)
{
RakNet::BitStream bsIn(packet->data,packet->length,false);
bsIn.IgnoreBytes(sizeof(MessageID));
RakNet::RakString s3p4StrAddress;
bsIn.Read(s3p4StrAddress);
unsigned short s3p4Port;
bsIn.Read(s3p4Port);
SystemAddress s3p4Addr(s3p4StrAddress.C_String(), s3p4Port);
DataStructures::List<RakNetSmartPtr<RakNetSocket> > sockets;
rakPeerInterface->GetSockets(sockets);
// Send off the RakNet socket to the specified address, message is unformatted
// Server does this twice, so don't have to unduly worry about packetloss
RakNet::BitStream bsOut;
bsOut.Write((MessageID) NAT_TYPE_PORT_RESTRICTED);
bsOut.Write(rakPeerInterface->GetGuidFromSystemAddress(UNASSIGNED_SYSTEM_ADDRESS));
SocketLayer::Instance()->SendTo_PC( sockets[0]->s, (const char*) bsOut.GetData(), bsOut.GetNumberOfBytesUsed(), s3p4Addr.binaryAddress, s3p4Addr.port);
}
void NatTypeDetectionClient::Shutdown(void)
{
serverAddress=UNASSIGNED_SYSTEM_ADDRESS;
if (c2!=INVALID_SOCKET)
{
closesocket(c2);
c2=INVALID_SOCKET;
}
}
#endif // _RAKNET_SUPPORT_*
|
namespace Microsoft.Protocols.TestSuites.MS_OXWSSYNC
{
using System.Xml.XPath;
using Microsoft.Protocols.TestSuites.Common;
using Microsoft.Protocols.TestTools;
/// <summary>
/// The adapter interface which provides methods CreateItem, DeleteItem, GetItem and UpdateItem defined in MS-OXWSCORE.
/// </summary>
public interface IMS_OXWSCOREAdapter : IAdapter
{
/// <summary>
/// Gets the raw XML request sent to protocol SUT.
/// </summary>
IXPathNavigable LastRawRequestXml { get; }
/// <summary>
/// Gets the raw XML response received from protocol SUT.
/// </summary>
IXPathNavigable LastRawResponseXml { get; }
/// <summary>
/// Create items on the server.
/// </summary>
/// <param name="createItemRequest">Specify a request to create items on the server.</param>
/// <returns>A response to this operation request.</returns>
CreateItemResponseType CreateItem(CreateItemType createItemRequest);
/// <summary>
/// Delete items on the server.
/// </summary>
/// <param name="deleteItemRequest">Specify a request to delete item on the server.</param>
/// <returns>A response to this operation request.</returns>
DeleteItemResponseType DeleteItem(DeleteItemType deleteItemRequest);
/// <summary>
/// Get items on the server.
/// </summary>
/// <param name="getItemRequest">Specify a request to get items on the server.</param>
/// <returns>A response to this operation request.</returns>
GetItemResponseType GetItem(GetItemType getItemRequest);
/// <summary>
/// Update items on the server.
/// </summary>
/// <param name="updateItemRequest">Specify a request to update items on the server.</param>
/// <returns>A response to this operation request.</returns>
UpdateItemResponseType UpdateItem(UpdateItemType updateItemRequest);
}
}
|
# Functions that already have the correct syntax or miscellaneous functions
__all__ = ['sort', 'copy_reg', 'clip', 'rank',
'sign', 'shape', 'types', 'allclose', 'size',
'choose', 'swapaxes', 'array_str',
'pi', 'math', 'concatenate', 'putmask', 'put',
'around', 'vdot', 'transpose', 'array2string', 'diagonal',
'searchsorted', 'copy', 'resize',
'array_repr', 'e', 'StringIO', 'pickle',
'argsort', 'convolve', 'cross_correlate',
'dot', 'outerproduct', 'innerproduct', 'insert']
import types
import pickle
import math
import copy
import sys
if sys.version_info[0] >= 3:
    # Python 3 renamed these modules; import them conditionally so the old names keep working.
    import copyreg as copy_reg
    import io
    StringIO = io.BytesIO
else:
    import StringIO
    import copy_reg
from numpy import sort, clip, rank, sign, shape, putmask, allclose, size,\
choose, swapaxes, array_str, array_repr, e, pi, put, \
resize, around, concatenate, vdot, transpose, \
diagonal, searchsorted, argsort, convolve, dot, \
outer as outerproduct, inner as innerproduct, \
correlate as cross_correlate, \
place as insert
from array_printer import array2string
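# Illustrative note (added; not part of the original module): the aliases above map the
# old Numeric-style names onto current numpy functions, e.g.
#
#   outerproduct([1, 2], [3, 4])    # same as numpy.outer  -> [[3, 4], [6, 8]]
#   innerproduct([1, 2], [3, 4])    # same as numpy.inner  -> 11
#   cross_correlate([1, 2], [3, 4]) # same as numpy.correlate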
|
#!/usr/bin/env python3
# Copyright © 2012-13 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. It is provided for educational
# purposes and is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
class Bag:
def __init__(self, items=None):
"""
>>> bag = Bag(list("ABCDEB"))
>>> bag.count("A"), bag.count("B"), bag.count("Z")
(1, 2, 0)
"""
self.__bag = {}
if items is not None:
for item in items:
self.add(item)
def clear(self):
"""
>>> bag = Bag(list("ABCDEB"))
>>> bag.count("A"), bag.count("B"), bag.count("Z")
(1, 2, 0)
>>> len(bag) # "Z" was added but count of 0 doesn't count in len
6
>>> bag.clear()
>>> len(bag)
0
"""
self.__bag.clear()
def add(self, item):
"""
>>> bag = Bag(list("ABCDEB"))
>>> bag.add("B")
>>> bag.add("X")
>>> bag.count("A"), bag.count("B"), bag.count("Z")
(1, 3, 0)
"""
self.__bag[item] = self.__bag.get(item, 0) + 1
def __delitem__(self, item):
"""
>>> bag = Bag(list("ABCDEB"))
>>> print(len(bag))
6
>>> del bag["B"]
>>> del bag["Z"]
Traceback (most recent call last):
...
KeyError: 'Z'
>>> bag.count("A"), bag.count("B"), bag.count("Z")
(1, 1, 0)
>>> del bag["B"]
>>> bag.count("A"), bag.count("B"), bag.count("Z")
(1, 0, 0)
>>> del bag["C"], bag["D"], bag["E"]
>>> print(len(bag))
1
>>> del bag["A"]
>>> print(len(bag))
0
>>> del bag["A"]
Traceback (most recent call last):
...
KeyError: 'A'
"""
if self.__bag.get(item) is not None:
self.__bag[item] -= 1
if self.__bag[item] <= 0:
del self.__bag[item]
else:
raise KeyError(str(item))
def count(self, item):
"""
>>> bag = Bag(list("ABCDEB"))
>>> bag.count("B"), bag.count("X")
(2, 0)
"""
return self.__bag.get(item, 0)
def __len__(self):
"""
>>> bag = Bag(list("ABCDEB"))
>>> len(bag)
6
>>> bag.add("B")
>>> len(bag)
7
>>> bag.add("X")
>>> len(bag)
8
>>> bag.add("B")
>>> len(bag)
9
>>> del bag["Z"]
Traceback (most recent call last):
...
KeyError: 'Z'
>>> len(bag)
9
>>> del bag["A"]
>>> len(bag)
8
>>> for _ in range(4): del bag["B"]
>>> len(bag)
4
>>> bag.clear()
>>> len(bag)
0
"""
return sum(count for count in self.__bag.values())
def __iter__(self):
"""
>>> bag = Bag(list("DABCDEBCCDD"))
>>> for key in sorted(bag.items()): print(key, end=" ")
A B B C C C D D D D E
>>> for key in sorted(bag): print(key, end=" ")
A B B C C C D D D D E
>>> for _ in range(4): del bag["D"]
>>> for key in sorted(bag.items()): print(key, end=" ")
A B B C C C E
>>> for key in sorted(bag): print(key, end=" ")
A B B C C C E
>>> del bag["A"]
>>> for key in sorted(bag.items()): print(key, end=" ")
B B C C C E
>>> for key in sorted(bag): print(key, end=" ")
B B C C C E
"""
for item, count in self.__bag.items():
for _ in range(count):
yield item
items = __iter__
def __contains__(self, item):
"""
>>> bag = Bag(list("DABCDEBCCDD"))
>>> "D" in bag
True
>>> del bag["D"]
>>> "D" in bag
True
>>> del bag["D"]
>>> "D" in bag
True
>>> del bag["D"]
>>> "D" in bag
True
>>> del bag["D"]
>>> "D" in bag
False
>>> "X" in bag
False
"""
return item in self.__bag
if __name__ == "__main__":
import doctest
doctest.testmod()
|
<!-- Copyright 2008 Lubomir Bourdev and Hailin Jin
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
-->
<!--
Copyright 2005-2007 Adobe Systems Incorporated
Distributed under the MIT License (see accompanying file LICENSE_1_0_0.txt
or a copy at http://stlab.adobe.com/licenses.html)
Some files are held under additional license.
Please see "http://stlab.adobe.com/licenses.html" for more information.
-->
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
<head>
<TITLE>Generic Image Library: channel_converter_unsigned</TITLE>
<META HTTP-EQUIV="content-type" CONTENT="text/html;charset=ISO-8859-1"/>
<LINK TYPE="text/css" REL="stylesheet" HREF="adobe_source.css"/>
</head>
<body>
<table border="0" cellspacing="0" cellpadding="0" style='width: 100%; margin: 0; padding: 0'><tr>
<td width="100%" valign="top" style='padding-left: 10px; padding-right: 10px; padding-bottom: 10px'>
<div class="qindex"><a class="qindex" href="index.html">Modules</a>
| <a class="qindex" href="classes.html">Alphabetical List</a>
| <a class="qindex" href="annotated.html">Class List</a>
| <a class="qindex" href="dirs.html">Directories</a>
| <a class="qindex" href="files.html">File List</a>
| <a class="qindex" href="../index.html">GIL Home Page</a>
</div>
<!-- End Header -->
<!-- Generated by Doxygen 1.5.6 -->
<div class="contents">
<h1>channel_converter_unsigned<br>
<small>
[<a class="el" href="g_i_l_0099.html">channel_convert</a>]</small>
</h1><hr><a name="_details"></a><h2>Detailed Description</h2>
Convert one unsigned/floating point channel to another. Converts both the channel type and range.
<p>
<p>
<table border="0" cellpadding="0" cellspacing="0">
<tr><td></td></tr>
<tr><td colspan="2"><br><h2>Classes</h2></td></tr>
<tr><td class="memItemLeft" nowrap align="right" valign="top">struct </td><td class="memItemRight" valign="bottom"><a class="el" href="g_i_l_0410.html">channel_converter_unsigned< T, T ></a></td></tr>
<tr><td class="mdescLeft"> </td><td class="mdescRight">Converting a channel to itself - identity operation. <a href="g_i_l_0410.html#_details">More...</a><br></td></tr>
<tr><td class="memItemLeft" nowrap align="right" valign="top">struct </td><td class="memItemRight" valign="bottom"><a class="el" href="g_i_l_0414.html">channel_converter_unsigned< bits32f, DstChannelV ></a></td></tr>
<tr><td class="memItemLeft" nowrap align="right" valign="top">struct </td><td class="memItemRight" valign="bottom"><a class="el" href="g_i_l_0412.html">channel_converter_unsigned< bits32, bits32f ></a></td></tr>
<tr><td class="mdescLeft"> </td><td class="mdescRight">32 bit <-> float channel conversion <a href="g_i_l_0412.html#_details">More...</a><br></td></tr>
<tr><td class="memItemLeft" nowrap align="right" valign="top">struct </td><td class="memItemRight" valign="bottom"><a class="el" href="g_i_l_0416.html">channel_converter_unsigned< bits32f, bits32 ></a></td></tr>
<tr><td class="mdescLeft"> </td><td class="mdescRight">32 bit <-> float channel conversion <a href="g_i_l_0416.html#_details">More...</a><br></td></tr>
</table>
</div>
<hr size="1"><address style="text-align: right;"><small>Generated on Sat May 2 13:50:17 2009 for Generic Image Library by
<a href="http://www.doxygen.org/index.html">
<img src="doxygen.png" alt="doxygen" align="middle" border="0"></a> 1.5.6 </small></address>
</body>
</html>
|
"""
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'l4e2^_(%l%ix^5s-sfv7)2o6#d27d!ck6yoqmpxa#n(h_zayzy'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
#ALLOWED_HOSTS = ['130.206.117.178', 'localhost', '127.0.0.1', '192.168.88.243']
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'bootstrap3',
'floybd',
'material',
'material.frontend',
'material.admin',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
DATA_UPLOAD_MAX_NUMBER_FIELDS = None
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.i18n',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
from django.utils.translation import ugettext_lazy as _
LANGUAGES = (
('en', _('English')),
('ca', _('Catalan')),
)
LOCALE_PATHS = (
os.path.join(BASE_DIR, 'locale'),
)
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
STATIC_URL = '/static/'
LOGIN_REDIRECT_URL = '/admin'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': 'unix:/tmp/memcached.sock',
}
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(asctime)s;%(levelname)s %(message)s'
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple',
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': 'INFO',
'propagate': True,
},
},
}
|
// Increase Tracking for text layer or Duplicate and Nudge Right (alt →)
@import 'js/utilities.js'
var onRun = function(context) {
var doc = context.document;
var selection = context.selection;
for(var i=0; i < [selection count]; i++){
if (isText(selection[i])) { // tracking part
var layer = selection[i];
var tracking = layer.characterSpacing();
layer.characterSpacing = tracking + 1;
layer.adjustFrameToFit();
}
else { // nudge part
var s = selection[i]
var copy = [s duplicate]
var frame = [copy frame]
[frame setX:([frame x] + 1)]
if (i==0) {
[copy select:true byExpandingSelection:false]
} else {
[copy select:true byExpandingSelection:true]
}
}
}
[doc reloadInspector]
}
|
--TEST--
MongoDB\BSON\Regex initialization will alphabetize flags
--FILE--
<?php
$regex = new MongoDB\BSON\Regex('regexp', 'xusmli');
var_dump($regex);
?>
--EXPECTF--
object(MongoDB\BSON\Regex)#%d (%d) {
["pattern"]=>
string(6) "regexp"
["flags"]=>
string(6) "ilmsux"
}
|
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pyramid_tm
import sqlalchemy
from conary import conarycfg
from conary import conaryclient
from conary.repository import errors as cny_errors
from conary.repository.netrepos.auth_tokens import AuthToken
from conary.repository.netrepos import geoip
from conary.server.wsgi_hooks import ConaryHandler
from pyramid import config
from pyramid import request
from pyramid.decorator import reify
from sqlalchemy.pool import NullPool
from . import render
from .db import models
from .db import schema
class Request(request.Request):
@reify
def geoip(self):
return geoip.GeoIPLookup(self.cfg.geoIpFiles)
@reify
def db(self):
maker = self.registry.settings['db.sessionmaker']
db = maker()
schema.checkVersion(db)
return db
def getConaryClient(self, entitlements=()):
cfg = conarycfg.ConaryConfiguration(False)
cfg.configLine('proxyMap * conarys://localhost/conary/')
for key in entitlements:
cfg.entitlement.addEntitlement('*', key)
cli = conaryclient.ConaryClient(cfg)
self.addHeaders(cli)
return cli
def getForwardedFor(self):
authToken = AuthToken()
ConaryHandler.setRemoteIp(authToken, request=self)
return authToken.forwarded_for + [authToken.remote_ip]
def addHeaders(self, cli):
cache = cli.repos.c
forwarded_for = self.getForwardedFor()
old_factory = cache.TransportFactory
def TransportFactory(*args, **kwargs):
transport = old_factory(*args, **kwargs)
if forwarded_for:
transport.addExtraHeaders({
'x-forwarded-for': ', '.join(forwarded_for)})
return transport
cache.TransportFactory = TransportFactory
def filterFiles(self, files, cust_id=None):
if cust_id is not None:
entitlements = self.db.query(models.CustomerEntitlement,
).filter_by(cust_id=cust_id
).all()
entitlements = [x.entitlement.encode('ascii') for x in entitlements]
else:
entitlements = []
repos = self.getConaryClient(entitlements).repos
try:
has_files = repos.hasTroves(x.trove_tup for x in files)
return [x for x in files if has_files[x.trove_tup]]
except cny_errors.InsufficientPermission:
return []
def configure(ucfg):
dburl = ucfg.downloadDB
engine = sqlalchemy.create_engine(dburl, poolclass=NullPool)
maker = models.initialize_sql(engine, use_tm=True)
settings = {
'db.sessionmaker': maker,
}
# Configuration
cfg = config.Configurator(settings=settings)
cfg.add_renderer('json', render.json_render_factory)
cfg.include(pyramid_tm)
# Routes
cfg.add_route('conaryrc', '/conaryrc')
cfg.add_route('images_index', '/images')
cfg.add_route('image', '/images/by_id/{sha1}')
cfg.add_route('image_content', '/images/by_id/{sha1}/content')
cfg.add_route('customer_content', '/images/by_id/{sha1}/content/{cust_id}')
cfg.add_route('customer_images', '/images/by_customer/{cust_id}')
cfg.add_route('cust_ents', '/customers/{cust_id}/entitlements')
cfg.add_route('cust_ent_put', '/customer_entitlements')
# Registration
cfg.add_route('records', '/registration/v1/records')
# Views
cfg.scan(package='upsrv.views')
return cfg
def handle(request):
cfg = configure(ucfg=request.cfg)
app = cfg.make_wsgi_app()
return app.invoke_subrequest(request, use_tweens=True)
|
/*
* Kuali Coeus, a comprehensive research administration system for higher education.
*
* Copyright 2005-2016 Kuali, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kra.irb.auth;
import org.apache.commons.lang3.StringUtils;
import org.kuali.coeus.sys.framework.workflow.KcWorkflowService;
import org.kuali.kra.infrastructure.Constants;
import org.kuali.kra.infrastructure.PermissionConstants;
import org.kuali.kra.irb.Protocol;
import org.kuali.kra.irb.actions.submit.ProtocolReviewType;
import org.kuali.kra.irb.actions.submit.ProtocolSubmission;
/**
* Determine if a user can assign a protocol to a committee/schedule.
*/
public class ProtocolAssignReviewersUnavailableAuthorizer extends ProtocolAuthorizer {
private KcWorkflowService kraWorkflowService;
@Override
public boolean isAuthorized(String username, ProtocolTask task) {
Protocol protocol = task.getProtocol();
ProtocolSubmission submission = findSubmission(protocol);
return (!kraWorkflowService.isCurrentNode(protocol.getProtocolDocument(), Constants.PROTOCOL_IRBREVIEW_ROUTE_NODE_NAME) ||
!isPendingOrSubmittedToCommittee(protocol) ||
!(canPerformActionOnExpedited(protocol) || (isScheduleRequiredForReview(submission) && isAssignedToCommitteeAndSchedule(submission))
|| (!isScheduleRequiredForReview(submission) && isAssignedToCommittee(submission)))
) &&
hasPermission(username, protocol, PermissionConstants.PERFORM_IRB_ACTIONS_ON_PROTO);
}
/**
* Is the protocol's submission in a pending or submitted to committee status?
* @param protocol
* @return
*/
private boolean isPendingOrSubmittedToCommittee(Protocol protocol) {
return findSubmission(protocol) != null;
}
/**
* Is the submission assigned to a committee?
     * @param submission
* @return
*/
private boolean isAssignedToCommittee(ProtocolSubmission submission) {
return submission != null && !StringUtils.isBlank(submission.getCommitteeId());
}
/**
* Is the submission assigned to a committee and schedule?
     * @param submission
* @return
*/
private boolean isAssignedToCommitteeAndSchedule(ProtocolSubmission submission) {
return !StringUtils.isBlank(submission.getCommitteeId()) && !StringUtils.isBlank(submission.getScheduleId());
}
/**
     * Is the submission for a full committee review?
* @param submission
* @return
*/
private boolean isFullCommitteeReview(ProtocolSubmission submission) {
return submission != null && ProtocolReviewType.FULL_TYPE_CODE.equals(submission.getProtocolReviewTypeCode());
}
private boolean isScheduleRequiredForReview (ProtocolSubmission submission) {
return isFullCommitteeReview(submission) && StringUtils.isBlank(submission.getScheduleId());
}
public KcWorkflowService getKraWorkflowService() {
return kraWorkflowService;
}
public void setKraWorkflowService(KcWorkflowService kraWorkflowService) {
this.kraWorkflowService = kraWorkflowService;
}
}
|
"""
# Majic
# Copyright (C) 2014 CEH
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
from pylons import url
from joj.lib.base import redirect
from joj.lib import helpers
from joj.utils import constants, utils
class ModelRunControllerHelper(object):
"""
Helper for general functions in the model run controller
"""
def __init__(self, model_run_service):
"""
Create a model run controller helper
:param model_run_service: the model run service
        :return: nothing
"""
self._model_run_service = model_run_service
def check_user_quota(self, current_user):
"""
        Check that the user has enough free storage quota to create a new model run;
        if the quota is exceeded, flash an error and redirect to the model run index
        :param current_user: the current user
        :return: nothing
"""
total = 0
for user_id, status_name, storage_in_mb in self._model_run_service.get_storage_used(current_user):
if status_name != constants.MODEL_RUN_STATUS_PUBLISHED and status_name != constants.MODEL_RUN_STATUS_PUBLIC:
total += storage_in_mb
total_in_gb = utils.convert_mb_to_gb_and_round(total)
storage_percent_used = round(total_in_gb / current_user.storage_quota_in_gb * 100.0, 0)
if storage_percent_used >= constants.QUOTA_ABSOLUTE_LIMIT_PERCENT:
helpers.error_flash(constants.ERROR_MESSAGE_QUOTA_EXCEEDED)
redirect(url(controller="model_run", action="index"))
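        # Worked example (added; assumes convert_mb_to_gb_and_round divides by 1024):
        # 5120 MB of non-published model runs gives total_in_gb = 5.0, so with a
        # 10 GB quota storage_percent_used is 50.0 and no redirect happens; the user is
        # only blocked once this reaches QUOTA_ABSOLUTE_LIMIT_PERCENT.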
|
/** @jsx React.DOM */
var React = require('react');
var MallFilter = require('./MallFilter');
var SaleSection = require('./SaleSection');
var GlugrApp = React.createClass({
getInitialState: function () {
return { mallId: 1 };
},
filterSales: function (mallId) {
this.setState({
mallId: mallId
});
},
render: function () {
return (
<div className="container">
<div className="row">
<div className="12u">
<MallFilter
mallId={this.state.mallId}
malls={this.props.malls}
onTabChange={this.filterSales}
/>
</div>
</div>
<div className="row">
<div className="12u">
<SaleSection
mallId={this.state.mallId}
sales={this.props.sales}
/>
</div>
</div>
<div className="row">
<div className="12u">
<section>
<header className="major">
<h2>The Blog</h2>
</header>
<div className="row">
<div className="6u 12u(mobile)">
<section className="box">
<a href="#" className="image featured"><img src="images/pic08.jpg" alt="" /></a>
<header>
<h3>Magna tempus consequat lorem</h3>
<p>Posted 45 minutes ago</p>
</header>
<p>Lorem ipsum dolor sit amet sit veroeros sed et blandit consequat sed veroeros lorem et blandit adipiscing feugiat phasellus tempus hendrerit, tortor vitae mattis tempor, sapien sem feugiat sapien, id suscipit magna felis nec elit. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos lorem ipsum dolor sit amet.</p>
<footer>
<ul className="actions">
<li><a href="#" className="button icon fa-file-text">Continue Reading</a></li>
<li><a href="#" className="button alt icon fa-comment">33 comments</a></li>
</ul>
</footer>
</section>
</div>
<div className="6u 12u(mobile)">
<section className="box">
<a href="#" className="image featured"><img src="images/pic09.jpg" alt="" /></a>
<header>
<h3>Aptent veroeros et aliquam</h3>
<p>Posted 45 minutes ago</p>
</header>
<p>Lorem ipsum dolor sit amet sit veroeros sed et blandit consequat sed veroeros lorem et blandit adipiscing feugiat phasellus tempus hendrerit, tortor vitae mattis tempor, sapien sem feugiat sapien, id suscipit magna felis nec elit. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos lorem ipsum dolor sit amet.</p>
<footer>
<ul className="actions">
<li><a href="#" className="button icon fa-file-text">Continue Reading</a></li>
<li><a href="#" className="button alt icon fa-comment">33 comments</a></li>
</ul>
</footer>
</section>
</div>
</div>
</section>
</div>
</div>
</div>
);
}
});
module.exports = GlugrApp;
|
"""
"""
__author__ = 'gavinhackeling@gmail.com'
################# Sample 1 #################
"""
>>> import numpy as np
>>> print np.linalg.eig(np.array([[3, 2], [1, 2]]))[0]
[ 4. 1.]
"""
import numpy as np
print np.linalg.eig(np.array([[3, 2], [1, 2]]))[0]
W, V = np.linalg.eig(np.array([[3, 2], [1, 2]]))
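# Check (added for clarity): the characteristic polynomial of [[3, 2], [1, 2]] is
# (3 - x)*(2 - x) - 2*1 = x**2 - 5*x + 4 = (x - 4)*(x - 1),
# so the eigenvalues are 4 and 1, matching the [ 4.  1.] printed above.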
################# Sample 2 #################
"""
>>> import numpy as np
>>> print np.linalg.eig(np.array([[3, 2], [1, 2]]))[1]
[[ 0.89442719 -0.70710678]
[ 0.4472136 0.70710678]]
"""
import numpy as np
print np.linalg.eig(np.array([[3, 2], [1, 2]]))[1]
################# Figure 1 #################
"""
"""
import matplotlib
matplotlib.use('Qt4Agg')
import matplotlib.pyplot as plt
import numpy as np
A = np.array([[2, 1], [1, 2]])
plt.scatter(A[:, 0], A[:, 1], marker='.', s=200)
plt.plot(A[0], A[1])
plt.show()
################# Sample: Face Recognition #################
"""
>>> from os import walk, path
>>> import numpy as np
>>> import mahotas as mh
>>> from sklearn.cross_validation import train_test_split
>>> from sklearn.cross_validation import cross_val_score
>>> from sklearn.preprocessing import scale
>>> from sklearn.decomposition import PCA
>>> from sklearn.linear_model import LogisticRegression
>>> from sklearn.metrics import classification_report
>>> X = []
>>> y = []
>>> for dir_path, dir_names, file_names in walk('data/att-faces/orl_faces'):
>>> for fn in file_names:
>>> if fn[-3:] == 'pgm':
>>> image_filename = path.join(dir_path, fn)
>>> X.append(scale(mh.imread(image_filename, as_grey=True).reshape(10304).astype('float32')))
>>> y.append(dir_path)
>>> X = np.array(X)
>>> X_train, X_test, y_train, y_test = train_test_split(X, y)
>>> pca = PCA(n_components=150)
>>> X_train_reduced = pca.fit_transform(X_train)
>>> X_test_reduced = pca.transform(X_test)
>>> print 'The original dimensions of the training data were', X_train.shape
>>> print 'The reduced dimensions of the training data are', X_train_reduced.shape
>>> classifier = LogisticRegression()
>>> accuracies = cross_val_score(classifier, X_train_reduced, y_train)
>>> print 'Cross validation accuracy:', np.mean(accuracies), accuracies
>>> classifier.fit(X_train_reduced, y_train)
>>> predictions = classifier.predict(X_test_reduced)
>>> print classification_report(y_test, predictions)
The original dimensions of the training data were (300, 10304)
The reduced dimensions of the training data are (300, 150)
Cross validation accuracy: 0.833841819347 [ 0.82882883 0.83 0.84269663]
precision recall f1-score support
data/att-faces/orl_faces/s1 1.00 1.00 1.00 2
data/att-faces/orl_faces/s10 1.00 1.00 1.00 2
data/att-faces/orl_faces/s11 1.00 0.60 0.75 5
...
data/att-faces/orl_faces/s9 1.00 1.00 1.00 2
avg / total 0.92 0.89 0.89 100
"""
from os import walk, path
import numpy as np
import mahotas as mh
from sklearn.cross_validation import train_test_split
from sklearn.cross_validation import cross_val_score
from sklearn.preprocessing import scale
from sklearn.decomposition import PCA
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import classification_report
X = []
y = []
for dir_path, dir_names, file_names in walk('data/att-faces/orl_faces'):
for fn in file_names:
if fn[-3:] == 'pgm':
image_filename = path.join(dir_path, fn)
X.append(scale(mh.imread(image_filename, as_grey=True).reshape(10304).astype('float32')))
y.append(dir_path)
X = np.array(X)
X_train, X_test, y_train, y_test = train_test_split(X, y)
pca = PCA(n_components=150)
X_train_reduced = pca.fit_transform(X_train)
X_test_reduced = pca.transform(X_test)
print 'The original dimensions of the training data were', X_train.shape
print 'The reduced dimensions of the training data are', X_train_reduced.shape
classifier = LogisticRegression()
accuracies = cross_val_score(classifier, X_train_reduced, y_train)
print 'Cross validation accuracy:', np.mean(accuracies), accuracies
classifier.fit(X_train_reduced, y_train)
predictions = classifier.predict(X_test_reduced)
print classification_report(y_test, predictions)
|
<?php
/* flaminuxmioBundle:Default:dammiUnNome.html.twig */
class __TwigTemplate_10f2f55dd7105d82d79882c0b8c2b2d3 extends Twig_Template
{
public function __construct(Twig_Environment $env)
{
parent::__construct($env);
$this->parent = false;
$this->blocks = array(
);
}
protected function doDisplay(array $context, array $blocks = array())
{
// line 2
echo "Il risultato è ";
echo twig_escape_filter($this->env, (isset($context["stocazzo"]) ? $context["stocazzo"] : $this->getContext($context, "stocazzo")), "html", null, true);
echo "!";
}
public function getTemplateName()
{
return "flaminuxmioBundle:Default:dammiUnNome.html.twig";
}
public function isTraitable()
{
return false;
}
public function getDebugInfo()
{
return array ( 19 => 2,);
}
}
|
#!/usr/bin/python
import numpy as np
import matplotlib.pyplot as plt
def linearPowerLimitedRocket_dist( t, F, ve=10000, m0=1.0 ):
A = m0*ve
B = np.log( F*t - A )
return F*ve*( t/F + A*B/F**2 ) - t*ve*B
def getAccFT( F=None, ve=10000, P=1e+9, m0=1.0, fmass=0.1 ):
if F is None:
F = P/ve ;print "F[N]" , F
tend = (1-fmass)*ve*m0/F ;print "tend[s]", tend
return tend, F
def linearPowerLimitedRocket( t, ve=10000, P=1e+9, m0=1.0 ):
#F = P/ve ;print "F[N]" , F
#tend = m0*ve*(fmass - 1)/(F*fmass) ;print "tend[s]", tend
#tend = (1-fmass)*ve*m0/F ;print "tend[s]", tend
tend, F = getAccFT( ve=ve, P=P, m0=m0 )
    a = F/( m0 -F*t/ve )          #;print "a[G]", a/9.81
v0 = ve*np.log( m0*ve )
v = -ve*np.log( np.abs( m0*ve - F*t ) ) + v0
#s = ( ve*t + t*v - v*(m0*ve/F) )
s = ve*t + v * ( t - m0*ve/F )
#s = linearPowerLimitedRocket_dist( t, F, ve=ve, m0=m0 )
return s,v,a
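# Note (added for clarity): with constant thrust F and exhaust velocity ve the mass flow
# rate is mdot = F/ve, so m(t) = m0 - (F/ve)*t. The velocity above is then just the
# Tsiolkovsky relation v(t) = ve*ln(m0/m(t)), a(t) = F/m(t) follows directly, and
# s(t) is the closed-form time integral of v(t).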
P = 10e+9
ve = 10e+3
fmass = 0.1
m0 = 1.0
tend, F = getAccFT( ve=ve, P=P, m0=m0, fmass=fmass )
ts = np.linspace(0,tend,1000)
s,v,a = linearPowerLimitedRocket( ts, ve=ve, P=P, m0=m0 )
plt.figure( figsize=(5,9) )
plt.subplot(3,1,1); plt.plot( ts, a ); plt.ylabel('a'); plt.xlabel('t[s]'); plt.grid()
plt.axvline( tend, ls="--")
plt.subplot(3,1,2); plt.plot( ts, v ); plt.ylabel('v [m/s]'); plt.xlabel('t[s]') ; plt.grid()
plt.axvline( tend, ls="--")
plt.subplot(3,1,3); plt.plot( ts, s ); plt.ylabel('s [m] '); plt.xlabel('t[s]') ; plt.grid()
plt.show()
|
package com.bytegames.prevent;
import com.bytegames.prevent.MathHelper;
import java.awt.*;
import java.awt.geom.AffineTransform;
import java.awt.image.BufferedImage;
/**
* A 2D entity to display.
*
* @author byte
*/
public class Sprite {
private BufferedImage _image;
private double _angle;
/**
* Instantiates a new sprite.
*
* @param image The image this sprite has.
*/
public Sprite(BufferedImage image) {
_image = image;
_angle = 0;
}
/**
* @return The image stored in this sprite.
*/
public BufferedImage getImage() {
return _image;
}
/**
* @param image The image to store in this sprite.
*/
public void setImage(BufferedImage image) {
_image = image;
}
/**
* Rotate the sprite counter-clockwise.
*
* @param byAngle The angle to rotate the sprite left by.
*/
public void rotateLeft(double byAngle) {
_angle = MathHelper.clampAngle(_angle - byAngle);
}
/**
* Rotate the sprite clockwise.
*
* @param byAngle The angle to rotate the sprite right by.
*/
public void rotateRight(double byAngle) {
_angle = MathHelper.clampAngle(_angle + byAngle);
}
/**
* @return The angle the sprite is currently at.
*/
public double getAngle() {
return _angle;
}
/**
* @param angle The angle to set the sprite at.
*/
public void setAngle(double angle) {
_angle = MathHelper.clampAngle(angle);
}
/**
* Renders the sprite to the display.
*
* @param gfx The graphics object to draw with.
* @param point The place to draw at.
*/
public void draw(Graphics gfx, Point point) {
Graphics2D gfx2d = (Graphics2D)gfx;
//set transform based on angle
AffineTransform xform = gfx2d.getTransform();
AffineTransform xformOriginal = (AffineTransform)(xform.clone());
xform.rotate(Math.toRadians(_angle), point.x, point.y);
gfx2d.setTransform(xform);
//draw it
gfx2d.drawImage(_image, point.x, point.y, null);
//reset transform back
gfx2d.setTransform(xformOriginal);
}
}
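// Usage sketch (added; the image loading and paint context below are illustrative
// assumptions, not part of this class):
//   Sprite sprite = new Sprite(ImageIO.read(new File("tower.png")));
//   sprite.rotateRight(15.0);                  // angle is clamped by MathHelper
//   sprite.draw(graphics, new Point(64, 64));  // typically called from a render loop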
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This module is deprecated. Please use `airflow.providers.amazon.aws.operators.gcs_to_s3`.
"""
import warnings
from airflow.providers.amazon.aws.operators.gcs_to_s3 import GCSToS3Operator
warnings.warn(
"This module is deprecated. Please use `airflow.providers.amazon.aws.operators.gcs_to_s3`.",
DeprecationWarning, stacklevel=2
)
class GoogleCloudStorageToS3Operator(GCSToS3Operator):
"""
This class is deprecated. Please use `airflow.providers.amazon.aws.operators.gcs_to_s3.GCSToS3Operator`.
"""
def __init__(self, *args, **kwargs):
warnings.warn(
"This class is deprecated. "
"Please use `airflow.providers.amazon.aws.operators.gcs_to_s3.GCSToS3Operator`.",
DeprecationWarning, stacklevel=2
)
super().__init__(*args, **kwargs)
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Gestión de activos fijos para España",
"version": "1.0",
"depends": ["account_asset"],
"author": "Serv. Tecnol. Avanzados - Pedro M. Baeza, "
"Odoo Community Association (OCA)",
"description": """
Gestión de activos fijos española
=================================
Cambia la gestión estándar de activos fijos de OpenERP para acomodarla a las
regulaciones españolas:
* Cambia el método de cálculo para el prorrateo temporal.
* Añade un nuevo método de cálculo para porcentaje fijo por periodo.
* Añade la opción de trasladar la depreciación al final del periodo.
""",
"website": "http://www.serviciosbaeza.com",
"category": "Accounting & Finance",
"data": [
"account_asset_view.xml",
],
"active": False,
"installable": True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
#!/usr/bin/env python
import argparse
import boto
import datetime
import json
import os
import sys
import time
# Set timeout, for retry
#if not boto.config.has_section('Boto'):
# boto.config.add_section('Boto')
#boto.config.set('Boto','http_socket_timeout','30')
################################
# Parse command line args
parser = argparse.ArgumentParser(description='AWS uploader, please fill in your aws key and id in Boto config (~/.boto)')
parser.add_argument('-p','--path', help='Optional. Where to find the binaries, normally out/Release/dist', required=False)
parser.add_argument('-b','--buildername', help='Builder name, e.g. linux_32bit', required=True)
parser.add_argument('-r','--revision', help='Commit revision',required=True)
parser.add_argument('-n','--number', help='Build number', required=True)
parser.add_argument('-t','--bucket', help='AWS bucket name', required=True)
args = parser.parse_args()
################################
# Check and init variables
dist_dir = args.path
builder_name = args.buildername
got_revision = args.revision
build_number = args.number
bucket_name = args.bucket
date = datetime.date.today().strftime('%m-%d-%Y')
# If the binaries location is not given, calculate it from script related dir.
if dist_dir is None:
dist_dir = os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir, os.pardir, 'out', 'Release', 'dist')
if not os.path.isabs(dist_dir):
dist_dir = os.path.join(os.getcwd(), dist_dir)
if not os.path.isdir(dist_dir):
print 'Invalid path: ' + dist_dir
exit(-1)
dist_dir = os.path.normpath(dist_dir)
# it's for S3, so always use '/' here
upload_path = ''.join(['/' + date,
'/' + builder_name + '-build-' + build_number + '-' + got_revision])
file_list = os.listdir(dist_dir)
if len(file_list) == 0:
print 'Cannot find packages!'
exit(-1)
# move node-webkit- to the top of the list.
for i in range(len(file_list)):
fname = file_list[i]
if fname.startswith('node-webkit-v'):
del file_list[i]
file_list.insert(0,fname)
break
def print_progress(transmitted, total):
print ' %d%% transferred of total: %d bytes.' % (transmitted*100/total, total)
sys.stdout.flush()
def aws_upload(upload_path, file_list):
conn = boto.connect_s3()
print 'Connecting to S3 ...'
sys.stdout.flush()
bucket = conn.get_bucket(bucket_name)
print 'Uploading to: ' + upload_path
for f in file_list:
print 'Uploading "' + f + '" ...'
sys.stdout.flush()
# use '/' for s3
key = bucket.new_key(upload_path + '/' + f)
key.set_contents_from_filename(filename=os.path.join(dist_dir, f), cb=print_progress, num_cb=50, replace=True)
for retry in range(3):
try:
aws_upload(upload_path, file_list)
break
except Exception, e:
print e
sys.stdout.flush()
time.sleep(30) #wait for 30s and try again.
print 'Done.'
# vim: et:ts=4:sw=4
|
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 18 14:26:20 2013
@author: rarossi
"""
#!/usr/bin/python
# File: sum_primes.py
# Author: VItalii Vanovschi
# Desc: This program demonstrates parallel computations with pp module
# It calculates the sum of prime numbers below a given integer in parallel
# Parallel Python Software: http://www.parallelpython.com
import math, sys, time
import pp
def isprime(n):
"""Returns True if n is prime and False otherwise"""
if not isinstance(n, int):
raise TypeError("argument passed to is_prime is not of 'int' type")
if n < 2:
return False
if n == 2:
return True
max = int(math.ceil(math.sqrt(n)))
i = 2
while i <= max:
if n % i == 0:
return False
i += 1
return True
def sum_primes(n):
"""Calculates sum of all primes below given integer n"""
return sum([x for x in xrange(2,n) if isprime(x)])
print """Usage: python sum_primes.py [ncpus]
[ncpus] - the number of workers to run in parallel,
if omitted it will be set to the number of processors in the system
"""
# tuple of all parallel python servers to connect with
ppservers = ()
#ppservers = ("10.0.0.1",)
if len(sys.argv) > 1:
ncpus = int(sys.argv[1])
# Creates jobserver with ncpus workers
job_server = pp.Server(ncpus, ppservers=ppservers)
else:
# Creates jobserver with automatically detected number of workers
job_server = pp.Server(ppservers=ppservers)
print "Starting pp with", job_server.get_ncpus(), "workers"
# Submit a job of calculating sum_primes(100) for execution.
# sum_primes - the function
# (100,) - tuple with arguments for sum_primes
# (isprime,) - tuple with functions on which function sum_primes depends
# ("math",) - tuple with module names which must be imported before sum_primes execution
# Execution starts as soon as one of the workers becomes available
job1 = job_server.submit(sum_primes, (100,), (isprime,), ("math",))
# Retrieves the result calculated by job1
# The value of job1() is the same as sum_primes(100)
# If the job has not been finished yet, execution will wait here until result is available
result = job1()
print "Sum of primes below 100 is", result
start_time = time.time()
# The following submits 8 jobs and then retrieves the results
inputs = (1000000, 1001000, 1002000, 1003000, 1004000, 1005000, 1006000, 1007000,
1000000, 1001000, 1002000, 1003000, 1004000, 1005000, 1006000, 1007000,
1000000, 1001000, 1002000, 1003000, 1004000, 1005000, 1006000, 1007000,
1000000, 1001000, 1002000, 1003000, 1004000, 1005000, 1006000, 1007000,
1000000, 1001000, 1002000, 1003000, 1004000, 1005000, 1006000, 1007000,
1000000, 1001000, 1002000, 1003000, 1004000, 1005000, 1006000, 1007000,
1000000, 1001000, 1002000, 1003000, 1004000, 1005000, 1006000, 1007000,
1000000, 1001000, 1002000, 1003000, 1004000, 1005000, 1006000, 1007000)
jobs = [(input, job_server.submit(sum_primes,(input,), (isprime,), ("math",))) for input in inputs]
for input, job in jobs:
print "Sum of primes below", input, "is", job()
print "Time elapsed: ", time.time() - start_time, "s"
job_server.print_stats()
# Parallel Python Software: http://www.parallelpython.com
|
/// <reference path='fourslash.ts'/>
////class G<T> { // Introduce type parameter T
//// self: G<T>; // Use T as type argument to form instance type
//// f() {
//// this./*1*/self = /*2*/this; // self and this are both of type G<T>
//// }
////}
goTo.marker('1');
verify.quickInfoIs('(property) G<T>.self: G<T>');
goTo.marker('2');
verify.quickInfoIs('class G<T>');
|
'use strict';
const chai = require('chai'),
expect = chai.expect,
Support = require(__dirname + '/../support'),
DataTypes = require(__dirname + '/../../../lib/data-types'),
Sequelize = require('../../../index');
describe(Support.getTestDialectTeaser('associations'), () => {
describe('Test options.foreignKey', () => {
beforeEach(function() {
this.A = this.sequelize.define('A', {
id: {
type: DataTypes.CHAR(20),
primaryKey: true
}
});
this.B = this.sequelize.define('B', {
id: {
type: Sequelize.CHAR(20),
primaryKey: true
}
});
this.C = this.sequelize.define('C', {});
});
it('should not be overwritten for belongsTo', function(){
const reqValidForeignKey = { foreignKey: { allowNull: false }};
this.A.belongsTo(this.B, reqValidForeignKey);
this.A.belongsTo(this.C, reqValidForeignKey);
expect(this.A.attributes.CId.type).to.deep.equal(this.C.attributes.id.type);
});
it('should not be overwritten for belongsToMany', function(){
const reqValidForeignKey = { foreignKey: { allowNull: false }, through: 'ABBridge'};
this.B.belongsToMany(this.A, reqValidForeignKey);
this.A.belongsTo(this.C, reqValidForeignKey);
expect(this.A.attributes.CId.type).to.deep.equal(this.C.attributes.id.type);
});
it('should not be overwritten for hasOne', function(){
const reqValidForeignKey = { foreignKey: { allowNull: false }};
this.B.hasOne(this.A, reqValidForeignKey);
this.A.belongsTo(this.C, reqValidForeignKey);
expect(this.A.attributes.CId.type).to.deep.equal(this.C.attributes.id.type);
});
it('should not be overwritten for hasMany', function(){
const reqValidForeignKey = { foreignKey: { allowNull: false }};
this.B.hasMany(this.A, reqValidForeignKey);
this.A.belongsTo(this.C, reqValidForeignKey);
expect(this.A.attributes.CId.type).to.deep.equal(this.C.attributes.id.type);
});
});
});
|
/* EEPROM based configuration data storage structure
    The whole flight controller configuration is stored inside a CONFIG structure
    that can be accessed as a data union (which allows easy manipulation of the
    configuration data over serial and an easy way of storing it in EEPROM).
    This method lets us do "smart" memory checking of new data against the data
    already stored in EEPROM, which means we don't have to re-write the whole
    configuration union in EEPROM; we only modify the bytes that changed.
    This protects the EEPROM from unnecessary writes, extending its lifetime
    (EEPROM write cycles are limited; the actual number depends on the chip).
*/
#if defined(__MK20DX128__)
#define EEPROM_SIZE 512
#endif
/*ToDo: check the real size for the MK20DX256 */
#if defined(__MK20DX256__)
#define EEPROM_SIZE 512 //ToDo: Check this value, I expect 2048
#endif
#define EEPROM_VERSION 5
struct AXIS_MAP_struct {
uint8_t axis1:2;
uint8_t axis1_sign:1;
uint8_t axis2:2;
uint8_t axis2_sign:1;
uint8_t axis3:2;
uint8_t axis3_sign:1;
uint8_t initialized:1;
};
struct __attribute__((packed)) CONFIG_struct {
uint8_t version;
bool calibrateESC;
uint16_t minimumArmedThrottle;
// Sensor axis map
struct AXIS_MAP_struct GYRO_AXIS_MAP;
struct AXIS_MAP_struct ACCEL_AXIS_MAP;
struct AXIS_MAP_struct MAG_AXIS_MAP;
// Accelerometer
int16_t ACCEL_BIAS[3];
// RX
uint8_t CHANNEL_ASSIGNMENT[16];
uint64_t CHANNEL_FUNCTIONS[4];
// Attitude
float PID_YAW_c[4];
float PID_PITCH_c[4];
float PID_ROLL_c[4];
// Rate
float PID_YAW_m[4];
float PID_PITCH_m[4];
float PID_ROLL_m[4];
float PID_BARO[4];
float PID_SONAR[4];
// GPS
float PID_GPS[4];
};
union CONFIG_union {
struct CONFIG_struct data;
uint8_t raw[sizeof(struct CONFIG_struct)];
};
CONFIG_union CONFIG;
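// Added note: the union lets the same storage be addressed either as typed fields
// (CONFIG.data.version, CONFIG.data.ACCEL_BIAS, ...) or as a flat byte array
// (CONFIG.raw[i]) for the byte-wise EEPROM compare/write below; CONFIG.raw[0]
// aliases CONFIG.data.version because 'version' is the first member of the packed struct.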
void initializeEEPROM(void) {
// Default settings should be initialized here
CONFIG.data.version = EEPROM_VERSION;
CONFIG.data.calibrateESC = 0;
CONFIG.data.minimumArmedThrottle = 1100;
// Accelerometer
CONFIG.data.ACCEL_BIAS[XAXIS] = 0;
CONFIG.data.ACCEL_BIAS[YAXIS] = 0;
CONFIG.data.ACCEL_BIAS[ZAXIS] = 0;
// RX
CONFIG.data.CHANNEL_ASSIGNMENT[0] = 0;
CONFIG.data.CHANNEL_ASSIGNMENT[1] = 1;
CONFIG.data.CHANNEL_ASSIGNMENT[2] = 2;
CONFIG.data.CHANNEL_ASSIGNMENT[3] = 3;
CONFIG.data.CHANNEL_ASSIGNMENT[4] = 4;
CONFIG.data.CHANNEL_ASSIGNMENT[5] = 5;
CONFIG.data.CHANNEL_ASSIGNMENT[6] = 6;
CONFIG.data.CHANNEL_ASSIGNMENT[7] = 7;
CONFIG.data.CHANNEL_ASSIGNMENT[8] = 8;
CONFIG.data.CHANNEL_ASSIGNMENT[9] = 9;
CONFIG.data.CHANNEL_ASSIGNMENT[10] = 10;
CONFIG.data.CHANNEL_ASSIGNMENT[11] = 11;
CONFIG.data.CHANNEL_ASSIGNMENT[12] = 12;
CONFIG.data.CHANNEL_ASSIGNMENT[13] = 13;
CONFIG.data.CHANNEL_ASSIGNMENT[14] = 14;
CONFIG.data.CHANNEL_ASSIGNMENT[15] = 15;
CONFIG.data.CHANNEL_FUNCTIONS[0] = 0x04; // mode select ("stable mode" is set to trigger on AUX1-HIGH by default)
CONFIG.data.CHANNEL_FUNCTIONS[1] = 0x00; // baro select
CONFIG.data.CHANNEL_FUNCTIONS[2] = 0x00; // sonar select
CONFIG.data.CHANNEL_FUNCTIONS[3] = 0x00; // GPS select
    // Attitude
CONFIG.data.PID_YAW_c[P] = 4.0;
CONFIG.data.PID_YAW_c[I] = 0.0;
CONFIG.data.PID_YAW_c[D] = 0.0;
CONFIG.data.PID_YAW_c[WG] = 25.0;
CONFIG.data.PID_PITCH_c[P] = 4.0;
CONFIG.data.PID_PITCH_c[I] = 0.0;
CONFIG.data.PID_PITCH_c[D] = 0.0;
CONFIG.data.PID_PITCH_c[WG] = 25.0;
CONFIG.data.PID_ROLL_c[P] = 4.0;
CONFIG.data.PID_ROLL_c[I] = 0.0;
CONFIG.data.PID_ROLL_c[D] = 0.0;
CONFIG.data.PID_ROLL_c[WG] = 25.0;
// Rate
CONFIG.data.PID_YAW_m[P] = 200.0;
CONFIG.data.PID_YAW_m[I] = 5.0;
CONFIG.data.PID_YAW_m[D] = 0.0;
CONFIG.data.PID_YAW_m[WG] = 100.0;
CONFIG.data.PID_PITCH_m[P] = 80.0;
CONFIG.data.PID_PITCH_m[I] = 0.0;
CONFIG.data.PID_PITCH_m[D] = -3.0;
CONFIG.data.PID_PITCH_m[WG] = 1000.0;
CONFIG.data.PID_ROLL_m[P] = 80.0;
CONFIG.data.PID_ROLL_m[I] = 0.0;
CONFIG.data.PID_ROLL_m[D] = -3.0;
CONFIG.data.PID_ROLL_m[WG] = 1000.0;
// Baro
CONFIG.data.PID_BARO[P] = 25.0;
CONFIG.data.PID_BARO[I] = 0.6;
CONFIG.data.PID_BARO[D] = -1.0;
CONFIG.data.PID_BARO[WG] = 25.0;
// Sonar
CONFIG.data.PID_SONAR[P] = 50.0;
CONFIG.data.PID_SONAR[I] = 0.6;
CONFIG.data.PID_SONAR[D] = -1.0;
CONFIG.data.PID_SONAR[WG] = 25.0;
// GPS
CONFIG.data.PID_GPS[P] = 0.0;
CONFIG.data.PID_GPS[I] = 0.0;
CONFIG.data.PID_GPS[D] = 0.0;
CONFIG.data.PID_GPS[WG] = 0.0;
// This function will only initialize data variables
// writeEEPROM() needs to be called manually to store this data in EEPROM
}
void writeEEPROM(void) {
for (uint16_t i = 0; i < sizeof(struct CONFIG_struct); i++) {
if (CONFIG.raw[i] != EEPROM.read(i)) {
// Only re-write new data
// blocks containing the same value will be left alone
EEPROM.write(i, CONFIG.raw[i]);
}
}
}
void readEEPROM(void) {
if (EEPROM.read(0) == 255) {
// No EEPROM values detected, re-initialize
initializeEEPROM();
} else {
// There "is" data in the EEPROM, read it all
for (uint16_t i = 0; i < sizeof(struct CONFIG_struct); i++) {
CONFIG.raw[i] = EEPROM.read(i);
}
// Verify version
if (CONFIG.data.version != EEPROM_VERSION) {
// Version doesn't match, re-initialize
initializeEEPROM();
}
}
}
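// Usage sketch (added; the function names below are illustrative assumptions for a
// typical Arduino-style sketch, not part of this file):
//   void setup() {
//       readEEPROM();      // loads CONFIG.raw, re-initializing on a version mismatch
//   }
//   void onConfigChanged() {
//       writeEEPROM();     // writes back only the bytes that differ from EEPROM
//   }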
|
#!/usr/bin/env python
"""This tests the performance of the AFF4 subsystem."""
from grr.lib import aff4
from grr.lib import data_store
from grr.lib import test_lib
from grr.lib.rdfvalues import client as rdf_client
class AFF4Benchmark(test_lib.AverageMicroBenchmarks):
"""Test performance of the AFF4 subsystem."""
REPEATS = 50
def testAFF4Creation(self):
"""How fast can we create new AFF4 objects."""
def CreateAFF4Object(object_type="AFF4Object", urn="aff4:/test"):
fd = aff4.FACTORY.Create(urn, object_type, token=self.token)
fd.Close()
for object_type in ["AFF4Object", "HashImage", "AFF4MemoryStream"]:
self.TimeIt(CreateAFF4Object, name="Create %s" % object_type,
object_type=object_type)
self.TimeIt(CreateAFF4Object, name="Create VFSGRRClient",
object_type="VFSGRRClient", urn="C.1234567812345678")
def testAFF4CreateAndSet(self):
"""How long does it take to create and set properties."""
client_info = rdf_client.ClientInformation(client_name="GRR",
client_description="Description")
def CreateAFF4Object():
"""Blind write a VFSGRRClient with 1000 client info attributes."""
fd = aff4.FACTORY.Create("C.1234567812345678", "VFSGRRClient",
token=self.token)
fd.Set(fd.Schema.HOSTNAME("Foobar"))
for _ in range(1000):
fd.AddAttribute(fd.Schema.CLIENT_INFO, client_info)
fd.Close()
# Time creation into an empty data store.
self.TimeIt(CreateAFF4Object, pre=data_store.DB.Clear)
# Now we want to measure the time to read one of these object.
data_store.DB.Clear()
CreateAFF4Object()
def ReadAFF4Object():
fd = aff4.FACTORY.Open("C.1234567812345678", token=self.token,
ignore_cache=True, age=aff4.ALL_TIMES)
self.assertEqual(fd.Get(fd.Schema.HOSTNAME), "Foobar")
self.TimeIt(ReadAFF4Object, name="Read attribute from AFF4Object")
def ReadVersionedAFF4Attribute():
fd = aff4.FACTORY.Open("C.1234567812345678", token=self.token,
ignore_cache=True, age=aff4.ALL_TIMES)
for x in fd.GetValuesForAttribute(fd.Schema.CLIENT_INFO):
self.assertEqual(x.client_name, "GRR")
self.TimeIt(ReadVersionedAFF4Attribute,
name="Read heavily versioned Attributes")
def ReadSomeVersionedAFF4Attribute():
fd = aff4.FACTORY.Open("C.1234567812345678", token=self.token,
ignore_cache=True, age=aff4.ALL_TIMES)
      # Only read the first 50 attributes.
for i, x in enumerate(fd.GetValuesForAttribute(fd.Schema.CLIENT_INFO)):
self.assertEqual(x.client_name, "GRR")
if i > 50:
break
self.TimeIt(ReadSomeVersionedAFF4Attribute,
name="Read few versioned Attributes")
# Using Get() on a multi versioned object should only parse one value.
def ReadAVersionedAFF4Attribute():
fd = aff4.FACTORY.Open("C.1234567812345678", token=self.token,
ignore_cache=True, age=aff4.ALL_TIMES)
x = fd.Get(fd.Schema.CLIENT_INFO)
self.assertEqual(x.client_name, "GRR")
self.TimeIt(ReadAVersionedAFF4Attribute,
name="Read one versioned Attributes")
|
#!/usr/bin/env python
################################################################
# LiveQ - An interactive volunteering computing batch system
# Copyright (C) 2013 Ioannis Charalampidis
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
################################################################
# This script parses the description.json file which contains the observable
# description entries (beam, process, energy, tune parameters and cuts) for the reference data.
# ----------
import sys
sys.path.append("../liveq-common")
# ----------
import shutil
import os
import ConfigParser
import json
import re
import glob
specific = "-"
out_dir = "ref.local"
generator = "pythia8"
version = "170"
# Load descriptions json
data = {}
with open("%s/description.json" % out_dir, "r") as f:
data = json.loads( "".join(f.readlines()) )
# Iterate over the description cache
checked = {}
for k,v in data.iteritems():
(observable, beam, process, energy, params, cuts, group, name, analysis) = v[0]
	# Build an index key so each (beam, process, energy, params, cuts) combination is processed only once
index = "%s:%s:%s:%s:%s" % (beam, process, energy, params, cuts)
if index in checked:
continue
checked[index] = True
# Build up the run cards that we need for pythia
print "%s %s %s %s %s %s %s" % (beam, process, energy, params, cuts, generator, version)
|
#!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
StringIO # workaround for pyflakes issue #13
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from cpu import CPUCollector
################################################################################
class TestCPUCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('CPUCollector', {
'interval': 10
})
self.collector = CPUCollector(config, None)
def test_import(self):
self.assertTrue(CPUCollector)
@patch('__builtin__.open')
@patch('os.access', Mock(return_value=True))
@patch.object(Collector, 'publish')
def test_should_open_proc_stat(self, publish_mock, open_mock):
open_mock.return_value = StringIO('')
self.collector.collect()
open_mock.assert_called_once_with('/proc/stat')
@patch.object(Collector, 'publish')
def test_should_work_with_synthetic_data(self, publish_mock):
patch_open = patch('__builtin__.open', Mock(return_value=StringIO(
'cpu 100 200 300 400 500 0 0 0 0 0')))
patch_open.start()
self.collector.collect()
patch_open.stop()
self.assertPublishedMany(publish_mock, {})
patch_open = patch('__builtin__.open', Mock(return_value=StringIO(
'cpu 110 220 330 440 550 0 0 0 0 0')))
patch_open.start()
self.collector.collect()
patch_open.stop()
self.assertPublishedMany(publish_mock, {
'total.idle': 4.0,
'total.iowait': 5.0,
'total.nice': 2.0,
'total.system': 3.0,
'total.user': 1.0
})
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
CPUCollector.PROC = self.getFixturePath('proc_stat_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
CPUCollector.PROC = self.getFixturePath('proc_stat_2')
self.collector.collect()
metrics = {
'total.idle': 2440.8,
'total.iowait': 0.2,
'total.nice': 0.0,
'total.system': 0.2,
'total.user': 0.4
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
@patch.object(Collector, 'publish')
def test_should_work_with_ec2_data(self, publish_mock):
self.collector.config['interval'] = 30
patch_open = patch('os.path.isdir', Mock(return_value=True))
patch_open.start()
CPUCollector.PROC = self.getFixturePath('ec2_stat_1')
self.collector.collect()
self.assertPublishedMany(publish_mock, {})
CPUCollector.PROC = self.getFixturePath('ec2_stat_2')
self.collector.collect()
patch_open.stop()
metrics = {
'total.idle': 68.4,
'total.iowait': 0.6,
'total.nice': 0.0,
'total.system': 13.7,
'total.user': 16.666666666666668
}
self.assertPublishedMany(publish_mock, metrics)
################################################################################
if __name__ == "__main__":
unittest.main()
|
from django.forms import ModelForm
from django import forms
from django.utils.translation import ugettext_lazy as _
from incidents.models import IncidentCategory, Incident, Comments, BusinessLine
# forms ===============================================================
class IncidentForm(ModelForm):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('for_user', None)
permissions = kwargs.pop('permissions', None)
has_permission = True
if permissions is None:
permissions = ['incidents.handle_incidents', ]
has_permission = False
super(ModelForm, self).__init__(*args, **kwargs)
if self.user is not None:
if not isinstance(permissions, (list, tuple)):
permissions = [permissions, ]
if 'instance' not in kwargs and not has_permission:
permissions.append('incidents.report_events')
self.fields['concerned_business_lines'].queryset = BusinessLine.authorization.for_user(self.user,
permissions)
self.fields['subject'].error_messages['required'] = 'This field is required.'
self.fields['category'].error_messages['required'] = 'This field is required.'
self.fields['concerned_business_lines'].error_messages['required'] = 'This field is required.'
self.fields['detection'].error_messages['required'] = 'This field is required.'
self.fields['severity'].error_messages['required'] = 'This field is required.'
self.fields['is_major'].error_messages['required'] = 'This field is required.'
self.fields['is_major'].label = 'Major?'
def clean(self):
cleaned_data = super(IncidentForm, self).clean()
if self.user is not None:
business_lines = cleaned_data.get("concerned_business_lines")
is_incident = cleaned_data.get("is_incident")
if is_incident:
bl_ids = business_lines.values_list('id', flat=True)
handling_bls = BusinessLine.authorization.for_user(self.user, 'incidents.handle_incidents').filter(
pk__in=bl_ids).count()
if len(bl_ids) != handling_bls:
self.add_error('is_incident',
forms.ValidationError(_('You cannot create incidents for these business lines')))
return cleaned_data
class Meta:
model = Incident
exclude = ('opened_by', 'main_business_lines', 'is_starred', 'artifacts')
class CommentForm(ModelForm):
def __init__(self, *args, **kwargs):
        super(CommentForm, self).__init__(*args, **kwargs)
self.fields['comment'].error_messages['required'] = 'This field is required.'
self.fields['action'].error_messages['required'] = 'This field is required.'
class Meta:
model = Comments
exclude = ('incident', 'opened_by')
widgets = {
'action': forms.Select(attrs={'required': True, 'class': 'form-control'})
}
class UploadFileForm(forms.Form):
title = forms.CharField()
file = forms.FileField()
|
# The Hazard Library
# Copyright (C) 2012-2014, GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Module :mod:`openquake.hazardlib.speedups` contains internal utilities for
managing alternative implementation of the same functionality depending on
their availability.
"""
import inspect
class SpeedupsRegistry(object):
"""
    The speedups registry manages alternative implementations
    of functions. A typical use case looks like this:
.. code-block:: python
# in the module namespace
def do_foo(foo, bar):
# code in pure python
...
def do_bar(baz, quux):
# code in pure python
...
# in the end of the module
try:
import _foobar_speedups
except ImportError:
import warnings
warnings.warn("foobar speedups are not available", RuntimeWarning)
else:
from openquake.hazardlib import speedups
def _c_do_foo(foo, bar):
return _foobar_speedups.do_foo(foo, bar)
speedups.register(do_foo, _c_do_foo)
del _c_do_foo
def _c_do_bar(baz, quux):
                return _foobar_speedups.do_bar(baz, quux)
speedups.register(do_bar, _c_do_bar)
del _c_do_bar
Global registry is being used here. All speedups are enabled by default.
In order to disable them, use :meth:`disable`.
"""
def __init__(self):
self.enabled = True
self.funcs = {}
def register(self, func, altfunc):
"""
Add a function and its alternative implementation to the registry.
If registry is enabled, function code will be substituted
by an alternative implementation immediately.
:param func:
A function object to patch.
:param altfunc:
An alternative implementation of the function. Must have
the same signature and is supposed to behave exactly
the same way as ``func``.
"""
assert inspect.getargspec(func) == inspect.getargspec(altfunc), \
"functions signatures are different in %s and %s" % \
(func, altfunc)
self.funcs[func] = (func.func_code, altfunc.func_code)
if self.enabled:
# here we substitute the "func_code" attribute of the function,
# which allows us not to worry of when and how is this function
# being imported by other modules
func.func_code = altfunc.func_code
def enable(self):
"""
Set implementation to "alternative" for all the registered functions.
"""
for func in self.funcs:
origcode, altcode = self.funcs[func]
func.func_code = altcode
self.enabled = True
def disable(self):
"""
Set implementation to "original" for all the registered functions.
"""
for func in self.funcs:
origcode, altcode = self.funcs[func]
func.func_code = origcode
self.enabled = False
global_registry = SpeedupsRegistry()
#: Global (default) registry :meth:`register`.
register = global_registry.register
#: Global (default) registry :meth:`enable`.
enable = global_registry.enable
#: Global (default) registry :meth:`disable`.
disable = global_registry.disable
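# Hedged demo (added for illustration, not part of the original module): registers a
# pure-Python function together with an identically-signatured stand-in "speedup"
# and toggles between them through the module-level aliases defined above.
if __name__ == '__main__':
    def _py_add(a, b):
        return a + b

    def _alt_add(a, b):
        return a + b  # stand-in for an optimized implementation

    register(_py_add, _alt_add)   # _py_add now runs _alt_add's code object
    assert _py_add(1, 2) == 3
    disable()                     # restore the original code objects
    enable()                      # switch back to the alternative implementations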
|
#pragma once
#include <Sweet.h>
#include <node\Node.h>
#include <typeinfo>
#include <Texture.h>
#include <Game.h>
#include <MY_ResourceManager.h>
#include <Log.h>
#include <ctime>
#include <NumberUtils.h>
#include <MY_Game.h>
#ifdef _DEBUG
// memory leak debugging
#define _CRTDBG_MAP_ALLOC
#include <stdlib.h>
#include <crtdbg.h>
#endif
#ifdef _DEBUG
int main(void){
_CrtMemState s1;
_CrtMemCheckpoint( &s1 );
#else
int WinMain(HINSTANCE inst, HINSTANCE prev, LPSTR cmd, int show){
#endif
Log::THROW_ON_ERROR = true;
//OpenAL_Sound::masterGain = 0; // mute
OpenAL_Sound::categoricalGain["music"] = 1.25f;
OpenAL_Sound::categoricalGain["voice"] = 1.f;
sweet::NumberUtils::seed(time(nullptr)); // seed RNG
sweet::initialize("Rip It! Grip It! Sip It!");
MY_ResourceManager::init();
MY_ResourceManager::load();
MY_Game * game = new MY_Game();
game->init();
while (game->isRunning){
game->performGameLoop();
}
delete game;
game = nullptr;
MY_ResourceManager::destruct();
#ifdef _DEBUG
std::cout << "Final node count: " << Node::nodes.size() << std::endl;
for(auto n : Node::nodes){
std::cout << typeid(*n).name() << " " << n << std::endl;
}
#endif
sweet::destruct();
#ifdef _DEBUG
_CrtMemDumpAllObjectsSince(&s1);
#endif
}
|
from chiplotle.hpgl.pen import Pen as HPGLPen
from chiplotle.core.interfaces.formatdecorator import FormatDecorator
class Pen(FormatDecorator):
'''The Pen wraps HPGL Pen properties around a given Shape.
    - `number` is the pen number to use.
- `sticky` boolean; set to False to set plotter back to default values
at the end of the decorated shape. Set to True to skip reset.
'''
def __init__(self,
number,
velocity = None,
force = None,
acceleration= None,
thickness = None):
FormatDecorator.__init__(self)
self.number = number
self.velocity = velocity
self.force = force
self.acceleration = acceleration
self.thickness = thickness
@property
def _subcommands(self):
p = HPGLPen(self.number,
self.velocity,
self.force,
self.acceleration,
self.thickness)
return [p]
## DEMO
if __name__ == '__main__':
from chiplotle.geometry.shapes.rectangle import rectangle
pd = Pen(2, 3, 4, 5, 0.1)
r = rectangle(100, 20)
pd(r)
print r.format
|
from __future__ import division, absolute_import, print_function
import os
from numpy.distutils.fcompiler.gnu import GnuFCompiler
compilers = ['VastFCompiler']
class VastFCompiler(GnuFCompiler):
compiler_type = 'vast'
compiler_aliases = ()
description = 'Pacific-Sierra Research Fortran 90 Compiler'
version_pattern = (r'\s*Pacific-Sierra Research vf90 '
r'(Personal|Professional)\s+(?P<version>[^\s]*)')
# VAST f90 does not support -o with -c. So, object files are created
# to the current directory and then moved to build directory
object_switch = ' && function _mvfile { mv -v `basename $1` $1 ; } && _mvfile '
executables = {
'version_cmd' : ["vf90", "-v"],
'compiler_f77' : ["g77"],
'compiler_fix' : ["f90", "-Wv,-ya"],
'compiler_f90' : ["f90"],
'linker_so' : ["<F90>"],
'archiver' : ["ar", "-cr"],
'ranlib' : ["ranlib"]
}
module_dir_switch = None #XXX Fix me
module_include_switch = None #XXX Fix me
def find_executables(self):
pass
def get_version_cmd(self):
f90 = self.compiler_f90[0]
d, b = os.path.split(f90)
vf90 = os.path.join(d, 'v'+b)
return vf90
def get_flags_arch(self):
vast_version = self.get_version()
gnu = GnuFCompiler()
gnu.customize(None)
self.version = gnu.get_version()
opt = GnuFCompiler.get_flags_arch(self)
self.version = vast_version
return opt
if __name__ == '__main__':
from distutils import log
log.set_verbosity(2)
from numpy.distutils.fcompiler import new_fcompiler
compiler = new_fcompiler(compiler='vast')
compiler.customize()
print(compiler.get_version())
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Dipl.Tzt. Enno Deimel <ennodotvetatgmxdotnet>
#
# This file is part of gnuvet, published under the GNU General Public License
# version 3 or later (GPLv3+ in short). See the file LICENSE for information.
# Initially created: Sun Feb 8 13:57:36 2015 by: PyQt4 UI code generator 4.11.2
# TODO:
# How to handle umlauts in German, e.g. UEberweisung? Does not seem to be a problem.
#
# Maybe: check paymodes from db? Increase height accordingly?
from PyQt4.QtCore import Qt # Alignment, alas
from PyQt4.QtGui import (QApplication, QCheckBox, QDialogButtonBox,
QDoubleSpinBox, QLabel, QRadioButton, )
def tl(txt=''):
return QApplication.translate("Payment", txt, None, 1)
class Ui_Payment(object):
def setupUi(self, Payment):
Payment.resize(431, 300)
self.curbalLb = QLabel(Payment)
self.curbalLb.setGeometry(20, 10, 111, 16)
self.curbal = QLabel(Payment)
self.curbal.setGeometry(140, 10, 101, 20)
self.curbal.setAlignment(Qt.AlignRight|Qt.AlignTrailing|Qt.AlignVCenter)
self.totbalLb = QLabel(Payment)
self.totbalLb.setGeometry(20, 30, 111, 16)
self.totbal = QLabel(Payment)
self.totbal.setGeometry(140, 30, 101, 20)
self.totbal.setAlignment(Qt.AlignRight|Qt.AlignTrailing|Qt.AlignVCenter)
self.paymentLb = QLabel(Payment)
self.paymentLb.setGeometry(20, 73, 57, 15)
self.paymentSb = QDoubleSpinBox(Payment)
self.paymentSb.setGeometry(112, 70, 151, 22)
self.paymentSb.setMaximum(9999.99)
self.paymentSb.setAlignment(
Qt.AlignRight|Qt.AlignTrailing|Qt.AlignVCenter)
self.changeLb = QLabel(Payment)
self.changeLb.setGeometry(20, 100, 52, 15)
self.change = QLabel(Payment)
self.change.setGeometry(120, 100, 121, 20)
self.change.setAlignment(Qt.AlignRight|Qt.AlignTrailing|Qt.AlignVCenter)
self.printinvCb = QCheckBox(Payment)
self.printinvCb.setGeometry(20, 133, 100, 19)
self.printrecCb = QCheckBox(Payment)
self.printrecCb.setGeometry(20, 163, 100, 19)
self.modeLb = QLabel(Payment)
self.modeLb.setGeometry(280, 10, 101, 16)
self.cashRb = QRadioButton(Payment)
self.cashRb.setGeometry(280, 40, 120, 20)
self.cashRb.setChecked(True)
self.dcardRb = QRadioButton(Payment)
self.dcardRb.setGeometry(280, 70, 120, 20)
self.ccardRb = QRadioButton(Payment)
self.ccardRb.setGeometry(280, 100, 120, 20)
self.cheqRb = QRadioButton(Payment)
self.cheqRb.setGeometry(280, 130, 120, 20)
self.transRb = QRadioButton(Payment)
self.transRb.setGeometry(280, 160, 120, 20)
self.directRb = QRadioButton(Payment)
self.directRb.setGeometry(280, 190, 120, 20)
self.buttonBox = QDialogButtonBox(Payment)
self.buttonBox.setGeometry(40, 240, 341, 32)
self.buttonBox.setOrientation(Qt.Horizontal)
self.buttonBox.setStandardButtons(
QDialogButtonBox.Cancel|QDialogButtonBox.Ok)
self.buttonBox.setCenterButtons(True)
self.paymentLb.setBuddy(self.paymentSb)
self.retranslateUi(Payment)
def retranslateUi(self, Payment):
Payment.setWindowTitle(tl("GnuVet: Payment"))
self.curbalLb.setText(tl("Current Balance:"))
self.curbal.setText(tl("0.00"))
self.totbalLb.setText(tl("Total Balance:"))
self.totbal.setText(tl("0.00"))
self.paymentLb.setText(tl("&Payment:"))
self.changeLb.setText(tl("Change:"))
self.change.setText(tl("0.00"))
self.printinvCb.setText(tl("Pri&nt invoice"))
self.printrecCb.setText(tl("Print r&eceipt"))
self.modeLb.setText(tl("Payment Mode:"))
self.cashRb.setText(tl("&Cash"))
self.dcardRb.setText(tl("&Debit Card"))
self.ccardRb.setText(tl("C&redit Card"))
self.cheqRb.setText(tl("C&heque"))
self.transRb.setText(tl("&Transfer"))
self.directRb.setText(tl("D&irect Debit"))
if __name__ == "__main__":
from PyQt4.QtGui import QMainWindow, QShortcut
a = QApplication([])
a.setStyle('plastique')
b = Ui_Payment()
w = QMainWindow()
b.setupUi(w)
QShortcut('Ctrl+W', w, quit)
w.show()
exit(a.exec_())
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import threading
from django.conf import settings
from django.core.management.base import BaseCommand
from django.test.signals import template_rendered
from django_nose.runner import NoseTestSuiteRunner, translate_option
from mako import runtime
from mako.template import Template
__all__ = ['HueTestRunner']
# Capturing the mako context is not thread safe, so we wrap rendering in a mutex.
_MAKO_LOCK = threading.RLock()
def _instrumented_test_render(self, *args, **data):
"""
An instrumented Template render method, providing a signal
that can be intercepted by the test system Client
"""
with _MAKO_LOCK:
def mako_callable_(context, *args, **kwargs):
template_rendered.send(sender=self, template=self, context=context)
return self.original_callable_[-1](context, *args, **kwargs)
if hasattr(self, 'original_callable_'):
self.original_callable_.append(self.callable_)
else:
self.original_callable_ = [self.callable_]
self.callable_ = mako_callable_
try:
response = runtime._render(self, self.original_callable_[-1], args, data)
finally:
self.callable_ = self.original_callable_.pop()
return response
class HueTestRunner(NoseTestSuiteRunner):
__test__ = False
def setup_test_environment(self, **kwargs):
super(HueTestRunner, self).setup_test_environment(**kwargs)
Template.original_render = Template.render
Template.render = _instrumented_test_render
def teardown_test_environment(self, **kwargs):
super(HueTestRunner, self).teardown_test_environment(**kwargs)
Template.render = Template.original_render
del Template.original_render
def run_tests(self, test_labels, *args):
nose_argv = (['nosetests'] + list(test_labels))
if args:
nose_argv.extend(args)
if hasattr(settings, 'NOSE_ARGS'):
nose_argv.extend(settings.NOSE_ARGS)
# Skip over 'manage.py test' and any arguments handled by django.
django_opts = ['--noinput', '--liveserver', '-p', '--pattern']
for opt in BaseCommand.option_list:
django_opts.extend(opt._long_opts)
django_opts.extend(opt._short_opts)
nose_argv.extend(translate_option(opt) for opt in sys.argv[1:]
if opt.startswith('-') and not any(opt.startswith(d) for d in django_opts))
# if --nose-verbosity was omitted, pass Django verbosity to nose
if ('--verbosity' not in nose_argv and not any(opt.startswith('--verbosity=') for opt in nose_argv)):
nose_argv.append('--verbosity=%s' % str(self.verbosity))
if self.verbosity >= 1:
print(' '.join(nose_argv))
result = self.run_suite(nose_argv)
# suite_result expects the suite as the first argument. Fake it.
return self.suite_result({}, result)
|
from django.conf import settings
from django.contrib.sessions.models import Session
from django.contrib.sessions.backends.base import SessionBase
from django.core.exceptions import SuspiciousOperation
import datetime
class SessionStore(SessionBase):
"""
Implements database session store
"""
def __init__(self, session_key=None):
super(SessionStore, self).__init__(session_key)
def load(self):
try:
s = Session.objects.get(
session_key = self.session_key,
expire_date__gt=datetime.datetime.now()
)
return self.decode(s.session_data)
except (Session.DoesNotExist, SuspiciousOperation):
# Create a new session_key for extra security.
self.session_key = self._get_new_session_key()
self._session_cache = {}
# Save immediately to minimize collision
self.save()
# Ensure the user is notified via a new cookie.
self.modified = True
return {}
def exists(self, session_key):
try:
Session.objects.get(session_key=session_key)
except Session.DoesNotExist:
return False
return True
def save(self):
Session.objects.create(
session_key = self.session_key,
session_data = self.encode(self._session),
expire_date = datetime.datetime.now() + datetime.timedelta(seconds=settings.SESSION_COOKIE_AGE)
)
def delete(self, session_key):
try:
Session.objects.get(session_key=session_key).delete()
except Session.DoesNotExist:
pass
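# Hedged usage sketch (illustrative, not part of the original module; requires a
# configured Django settings module and database):
#
#   store = SessionStore()
#   store['cart'] = [1, 2, 3]
#   store.save()
#   print(SessionStore(store.session_key).load())   # -> {'cart': [1, 2, 3]}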
|
/***************************************************************************
qgsglobetilesource.h
---------------------
begin : August 2010
copyright : (C) 2010 by Pirmin Kalberer
(C) 2015 Sandro Mani
email : pka at sourcepole dot ch
smani at sourcepole dot ch
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#ifndef QGSGLOBETILESOURCE_H
#define QGSGLOBETILESOURCE_H
#include <osgEarth/TileSource>
#include <osg/ImageStream>
#include <QImage>
#include <QStringList>
#include <QLabel>
#include <QMutex>
#include "qgsrectangle.h"
//#define GLOBE_SHOW_TILE_STATS
class QgsCoordinateTransform;
class QgsMapCanvas;
class QgsMapRenderer;
class QgsMapSettings;
class QgsGlobeTileSource;
class QgsMapRendererParallelJob;
class QgsGlobeTileStatistics : public QObject
{
Q_OBJECT
public:
QgsGlobeTileStatistics();
~QgsGlobeTileStatistics() { s_instance = 0; }
static QgsGlobeTileStatistics* instance() { return s_instance; }
void updateTileCount( int change );
void updateQueueTileCount( int change );
signals:
void changed( int queued, int tot );
private:
static QgsGlobeTileStatistics* s_instance;
QMutex mMutex;
int mTileCount;
int mQueueTileCount;
};
int getTileCount();
class QgsGlobeTileImage : public osg::Image
{
public:
QgsGlobeTileImage( QgsGlobeTileSource* tileSource, const QgsRectangle& tileExtent, int tileSize, int tileLod );
~QgsGlobeTileImage();
bool requiresUpdateCall() const { return !mUpdatedImage.isNull(); }
QgsMapSettings createSettings( int dpi, const QStringList &layerSet ) const;
void setUpdatedImage( const QImage& image ) { mUpdatedImage = image; }
int dpi() const { return mDpi; }
const QgsRectangle& extent() { return mTileExtent; }
void update( osg::NodeVisitor * );
static bool lodSort( const QgsGlobeTileImage* lhs, const QgsGlobeTileImage* rhs ) { return lhs->mLod > rhs->mLod; }
private:
QgsGlobeTileSource* mTileSource;
QgsRectangle mTileExtent;
int mTileSize;
unsigned char* mTileData;
int mLod;
int mDpi;
QImage mUpdatedImage;
};
class QgsGlobeTileUpdateManager : public QObject
{
Q_OBJECT
public:
QgsGlobeTileUpdateManager( QObject* parent = 0 );
~QgsGlobeTileUpdateManager();
void updateLayerSet( const QStringList& layerSet ) { mLayerSet = layerSet; }
void addTile( QgsGlobeTileImage* tile );
void removeTile( QgsGlobeTileImage* tile );
signals:
void startRendering();
void cancelRendering();
private:
QStringList mLayerSet;
QList<QgsGlobeTileImage*> mTileQueue;
QgsGlobeTileImage* mCurrentTile;
QgsMapRendererParallelJob* mRenderer;
private slots:
void start();
void cancel();
void renderingFinished();
};
class QgsGlobeTileSource : public osgEarth::TileSource
{
public:
QgsGlobeTileSource( QgsMapCanvas* canvas, const osgEarth::TileSourceOptions& options = osgEarth::TileSourceOptions() );
Status initialize( const osgDB::Options *dbOptions ) override;
osg::Image* createImage( const osgEarth::TileKey& key, osgEarth::ProgressCallback* progress ) override;
osg::HeightField* createHeightField( const osgEarth::TileKey &/*key*/, osgEarth::ProgressCallback* /*progress*/ ) override { return 0; }
bool isDynamic() const override { return true; }
void refresh( const QgsRectangle &dirtyExtent );
void setLayerSet( const QStringList& layerSet );
const QStringList &layerSet() const;
private:
friend class QgsGlobeTileImage;
QMutex mTileListLock;
QList<QgsGlobeTileImage*> mTiles;
QgsMapCanvas* mCanvas;
QStringList mLayerSet;
QgsGlobeTileUpdateManager mTileUpdateManager;
void addTile( QgsGlobeTileImage* tile );
void removeTile( QgsGlobeTileImage* tile );
};
#endif // QGSGLOBETILESOURCE_H
|
"""
Django settings for django_workflow project.
Generated by 'django-admin startproject' using Django 1.8.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$kzyndoqi7kjm9_ld@_=%)3j)4dgzki8*)d+x9wy4rh*8s2va%'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_workflow',
'simple_approval',
'graphene_django',
'django_filters'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'django_workflow.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_workflow.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_ROOT = os.path.join(PROJECT_DIR, 'static')
STATIC_URL = '/static/'
|
export function hmget(key: string, ...fields: Array<any>) {
const hash = this.data.get(key);
return fields.map((field) => {
if (!hash || hash[field] === undefined) {
return null;
}
return hash[field];
});
}
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VirtualMachineScaleSetStorageProfile(Model):
"""Describes a virtual machine scale set storage profile.
:param image_reference: Specifies information about the image to use. You
can specify information about platform images, marketplace images, or
virtual machine images. This element is required when you want to use a
platform image, marketplace image, or virtual machine image, but is not
used in other creation operations.
:type image_reference:
~azure.mgmt.compute.v2017_12_01.models.ImageReference
:param os_disk: Specifies information about the operating system disk used
by the virtual machines in the scale set. <br><br> For more information
about disks, see [About disks and VHDs for Azure virtual
machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
:type os_disk:
~azure.mgmt.compute.v2017_12_01.models.VirtualMachineScaleSetOSDisk
:param data_disks: Specifies the parameters that are used to add data
disks to the virtual machines in the scale set. <br><br> For more
information about disks, see [About disks and VHDs for Azure virtual
machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
:type data_disks:
list[~azure.mgmt.compute.v2017_12_01.models.VirtualMachineScaleSetDataDisk]
"""
_attribute_map = {
'image_reference': {'key': 'imageReference', 'type': 'ImageReference'},
'os_disk': {'key': 'osDisk', 'type': 'VirtualMachineScaleSetOSDisk'},
'data_disks': {'key': 'dataDisks', 'type': '[VirtualMachineScaleSetDataDisk]'},
}
def __init__(self, image_reference=None, os_disk=None, data_disks=None):
super(VirtualMachineScaleSetStorageProfile, self).__init__()
self.image_reference = image_reference
self.os_disk = os_disk
self.data_disks = data_disks
|
import { filter, propType } from 'graphql-anywhere';
import { gql } from 'react-apollo';
import graphql from '../util/graphql';
import UserPreview from '../components/UserPreview';
const Users = ({ data: { allUsers } }) => (
<div className="wrapper">
<style jsx>{`
.wrapper {
display: flex;
justify-content: center;
}
`}</style>
{allUsers
.map(u => <UserPreview key={u.id} user={filter(UserPreview.fragments.user, u)} />)}
</div>
);
const query = gql`
query {
allUsers(
filter: {
OR: [{
tracks_some: {
id_not: "nonExistentId"
}
}, {
facilitatedRetreats_some: {
id_not: "nonExistentId"
}
}]
}
) {
id
...UserPreview
}
}
${UserPreview.fragments.user}
`;
Users.propTypes = {
data: propType(query).isRequired,
};
export default graphql(query)(Users);
|
<?php
/*
* This file is part of the prooph/event-store.
* (c) 2014 - 2015 prooph software GmbH <contact@prooph.de>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* Date: 10/09/15 - 07:20 PM
*/
namespace Prooph\EventStore\Snapshot\Adapter;
use Prooph\EventStore\Aggregate\AggregateType;
use Prooph\EventStore\Snapshot\Snapshot;
/**
* Class InMemoryAdapter
* @package Prooph\EventStore\Snapshot\Adapter
*/
final class InMemoryAdapter implements Adapter
{
/**
* @var array
*/
private $map = [];
/**
* Get the aggregate root if it exists otherwise null
*
* @param AggregateType $aggregateType
* @param string $aggregateId
* @return null|object
*/
public function get(AggregateType $aggregateType, $aggregateId)
{
if (! isset($this->map[$aggregateType->toString()][$aggregateId])) {
return;
}
return $this->map[$aggregateType->toString()][$aggregateId];
}
/**
* Save a snapshot
*
* @param Snapshot $snapshot
* @return void
*/
public function save(Snapshot $snapshot)
{
$this->map[$snapshot->aggregateType()->toString()][$snapshot->aggregateId()] = $snapshot;
}
}
|
#!/usr/bin/env python
'''Light submodule for dGraph scene description module
David Dunn
Jan 2017 - created by splitting off from dGraph
ALL UNITS ARE IN METRIC
ie 1 cm = .01
www.qenops.com
'''
__author__ = ('David Dunn')
__version__ = '1.6'
__all__ = ["Light", "PointLight", "DirectionLight"]
from dGraph import *
import dGraph.shaders as dgshdr
import numpy as np
from numpy.linalg import norm
class Light(object):
''' A world object that casts light
Intensity
Color
'''
def __init__(self, intensity=(1,1,1), **kwargs):
super(Light, self).__init__(**kwargs)
self._intensity = np.array(intensity, np.float32)
def fragmentShader(self, index):
pass
def pushToShader(self, index, shader):
pass
class PointLight(Light):
''' A light with falloff '''
def __init__(self, position = (0,0,0), **kwargs):
super(PointLight, self).__init__(**kwargs)
self._position = np.array(position, np.float32)
def fragmentShader(self, index):
return '''
uniform vec3 light{index}_intensity;
uniform vec3 light{index}_position;
vec3 getLightDirection{index}(vec3 worldLocation) {{
return normalize(light{index}_position - worldLocation);
}}
vec3 getLightIntensity{index}(vec3 worldLocation) {{
return light{index}_intensity;
}}
'''.format(index = index)
def pushToShader(self, index, shader):
#import pdb; pdb.set_trace();
dgshdr.setUniform(shader, 'light{index}_intensity'.format(index=index), np.array(self._intensity, np.float32))
dgshdr.setUniform(shader, 'light{index}_position'.format(index=index), np.array(self._position, np.float32))
class DirectionLight(Light):
''' A light where position doesn't matter, only a direction vector '''
def __init__(self, direction=(0.,0.,1.0), **kwargs):
super(DirectionLight, self).__init__(**kwargs)
self._direction = np.array(direction, np.float32)
def fragmentShader(self, index):
return '''
uniform vec3 light{index}_intensity;
uniform vec3 light{index}_direction;
vec3 getLightDirection{index}(vec3 worldLocation) {{
return normalize(light{index}_direction);
}}
vec3 getLightIntensity{index}(vec3 worldLocation) {{
return light{index}_intensity;
}}
'''.format(index = index)
def pushToShader(self, index, shader):
#import pdb; pdb.set_trace();
dgshdr.setUniform(shader, 'light{index}_intensity'.format(index=index), np.array(self._intensity, np.float32))
dgshdr.setUniform(shader, 'light{index}_direction'.format(index=index), np.array(self._direction, np.float32))
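# Hedged demo (added for illustration, not part of the original module): prints the
# GLSL snippet a PointLight contributes for light index 0. pushToShader() is omitted
# here because it needs a compiled shader program bound to a live GL context.
if __name__ == '__main__':
    light0 = PointLight(intensity=(1.0, 1.0, 1.0), position=(0.0, 2.0, 0.0))
    print(light0.fragmentShader(0))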
|
from core import GoogleService
import apiclient
class DriveService(GoogleService):
def __init__(self, credentials):
"""
"""
super(DriveService, self).__init__('drive', 'v3', credentials)
def list_files(self, page_size=10):
"""
"""
list_service = self.service.files().list(pageSize=page_size, fields="nextPageToken, files(id, name)")
results = list_service.execute()
return results.get('files', [])
def create_file_metadata(self, file_name, mime_type):
"""
"""
return {
'name': file_name,
'mimeType': mime_type
}
def create_media(self, file_name, mime_type, resumable=True):
"""
"""
return apiclient.http.MediaFileUpload(file_name, mimetype=mime_type, resumable=resumable)
def create_file(self, file_name, mime_type):
"""
"""
file_metadata = self.create_file_metadata(file_name, mime_type)
media = self.create_media(file_name, mime_type)
create_service = self.service.files().create(body=file_metadata, media_body=media, fields='id')
        return create_service.execute()  # the created file resource, containing the requested 'id'
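# Hedged usage sketch (illustrative, not part of the original module; obtaining
# `credentials` via the Google OAuth2 flow is outside this file's scope, and
# 'notes.txt' is a hypothetical local file):
#
#   service = DriveService(credentials)
#   for f in service.list_files(page_size=5):
#       print(f['id'], f['name'])
#   service.create_file('notes.txt', 'text/plain')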
|
using System;
using System.ComponentModel.DataAnnotations;
using KebabManager.Common.Enums;
using Pizza.Contracts.Default;
namespace KebabManager.Contracts.ViewModels.Customers
{
public sealed class CustomerDetailsModel : DetailsModelBase
{
[Display(Name = "Login")]
public string Login { get; set; }
[Display(Name = "First name")]
public string FirstName { get; set; }
[Display(Name = "Last name")]
public string LastName { get; set; }
[Display(Name = "Fingers count")]
public int FingersCount { get; set; }
[Display(Name = "Previous surgery date")]
public DateTime PreviousSurgeryDate { get; set; }
[Display(Name = "Customer type")]
public CustomerType Type { get; set; }
}
}
|
/*
* Copyright (C) 2014 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef SVGPointList_h
#define SVGPointList_h
#include "core/svg/SVGParsingError.h"
#include "core/svg/SVGPoint.h"
#include "core/svg/properties/SVGListPropertyHelper.h"
namespace blink {
class SVGPointListTearOff;
class SVGPointList final : public SVGListPropertyHelper<SVGPointList, SVGPoint> {
public:
typedef SVGPointListTearOff TearOffType;
static PassRefPtrWillBeRawPtr<SVGPointList> create()
{
return adoptRefWillBeNoop(new SVGPointList());
}
~SVGPointList() override;
SVGParsingError setValueAsString(const String&);
// SVGPropertyBase:
String valueAsString() const override;
void add(PassRefPtrWillBeRawPtr<SVGPropertyBase>, SVGElement*) override;
void calculateAnimatedValue(SVGAnimationElement*, float percentage, unsigned repeatCount, PassRefPtrWillBeRawPtr<SVGPropertyBase> fromValue, PassRefPtrWillBeRawPtr<SVGPropertyBase> toValue, PassRefPtrWillBeRawPtr<SVGPropertyBase> toAtEndOfDurationValue, SVGElement*) override;
float calculateDistance(PassRefPtrWillBeRawPtr<SVGPropertyBase> to, SVGElement*) override;
static AnimatedPropertyType classType() { return AnimatedPoints; }
private:
SVGPointList();
template <typename CharType>
SVGParsingError parse(const CharType*& ptr, const CharType* end);
};
DEFINE_SVG_PROPERTY_TYPE_CASTS(SVGPointList);
} // namespace blink
#endif // SVGPointList_h
|
from OpenGL.GLU import *
from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.arrays import vbo
from OpenGL.GL import shaders
from OpenGL.GL.ARB.color_buffer_float import *
from OpenGL.raw.GL.ARB.color_buffer_float import *
import numpy as np
import random
from PIL import Image
class Texture:
def __init__(self, name, tformat=GL_RGBA, wrap=GL_CLAMP_TO_EDGE, tfilter=GL_NEAREST, ttype=GL_UNSIGNED_BYTE, tinternal_format=GL_RGBA):
self.name = name
self.wrap = wrap
self.filter = tfilter
self.format = tformat
self.type = ttype
self.texture = glGenTextures(1)
self.internal_format = tinternal_format
glPixelStorei(GL_UNPACK_ALIGNMENT, 1)
glBindTexture(GL_TEXTURE_2D, self.texture)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, self.wrap)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, self.wrap)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, self.filter)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, self.filter)
def dtype(self):
return 'float32' if self.type == GL_FLOAT else 'uint8'
def read(self, width, height):
dtype = self.dtype()
num_items = width*height*4
glBindTexture(GL_TEXTURE_2D, self.texture);
if dtype == 'uint8':
return np.frombuffer(glGetTexImage(GL_TEXTURE_2D, 0, self.format, self.type), np.uint8)
else:
return np.frombuffer(glGetTexImage(GL_TEXTURE_2D, 0, self.format, self.type), np.float32)
def blank(self, width, height):
source_copy = np.array([[0, 0, 0, 255] for i in xrange(0, int(width*height))], copy=True)
glBindTexture(GL_TEXTURE_2D, self.texture);
glTexImage2D(GL_TEXTURE_2D, 0, self.internal_format, width, height, 0, self.format, self.type, source_copy)
def set(self, source, width, height):
source_copy = np.array(source, dtype=self.dtype(), copy=True)
glBindTexture(GL_TEXTURE_2D, self.texture);
glTexImage2D(GL_TEXTURE_2D, 0, self.internal_format, width, height, 0, self.format, self.type, source_copy)
def noise(self, width, height):
source_copy = np.array([[int(255*random.random()), int(255*random.random()), int(255*random.random()), int(255*random.random())] for i in xrange(0, int(width*height))], copy=True)
glBindTexture(GL_TEXTURE_2D, self.texture);
glTexImage2D(GL_TEXTURE_2D, 0, self.internal_format, width, height, 0, self.format, self.type, source_copy)
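# Hedged demo (added for illustration, not part of the original module): Texture()
# calls glGenTextures in its constructor, so a GL context must exist first; a GLUT
# window is created here for that purpose. Assumes GLUT/freeglut is available.
if __name__ == '__main__':
    glutInit()
    glutInitDisplayMode(GLUT_RGBA)
    glutCreateWindow(b'texture demo')
    tex = Texture('demo')
    data = np.zeros((64 * 64, 4), dtype=np.uint8)   # one RGBA byte quadruple per texel
    tex.set(data, 64, 64)                           # upload as a 64x64 GL_RGBA texture
    print(tex.read(64, 64).shape)                   # (16384,) flat uint8 buffer read back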
|
import numpy as np
import math
import ctypes
import rnn.kernel as algo
class SoftmaxCrossEntropy(object):
def __init__(self):
self.Y = None
self.dX = None
def __getstate__(self):
return True
def __setstate__(self, x):
self.Y = None
self.dX = None
def weights(self, dtype=np.float64):
return np.zeros(0, dtype=dtype)
@property
def shape(self): return 0
@property
def size(self): return 0
@property
def dW(self): return None
def reset(self):
pass
def advance(self):
pass
def resize_fw(self, X):
if self.Y is None:
self.Y = np.zeros(X.shape, dtype=X.dtype)
else:
if X.dtype != self.Y.dtype:
self.Y = self.Y.astype(X.dtype, copy=False)
if X.shape != self.Y.shape:
self.Y.resize(X.shape, refcheck=False)
def forward(self, W, X, train=None):
self.resize_fw(X)
algo.softmaxfw(X, self.Y)
return self.Y
def resize_bw(self):
if self.dX is None:
self.dX = np.zeros(self.Y.shape, dtype=self.Y.dtype)
else:
if self.dX.dtype != self.Y.dtype:
self.dX = self.dX.astype(self.Y.dtype, copy=False)
if self.dX.shape != self.Y.shape:
self.dX.resize(self.Y.shape)
def backward(self, W, Y, dW):
self.resize_bw()
algo.entmaxbw(self.Y, Y, self.dX)
return self.dX
def cross_entropy( yh, y ):
return algo.centfw(yh, y)
class CrossEntropyError:
def __init__(self, dtype):
self.dtype = dtype
def init(self, shape, dtype=None):
(k,b,n) = shape
self.y = np.random.randint(n, size=(k,b))
return self
def __call__(self, yh):
return cross_entropy(yh, self.y)
def grad(self, yh):
return self.y
def test():
import test
size = 4
layer = SoftmaxCrossEntropy()
error_f = CrossEntropyError(np.int32)
test.layer(layer, ins=size, outs=size, err_f=error_f, float_t = np.float64)
test.layer(layer, ins=size, outs=size, err_f=error_f, float_t = np.float32)
error_f = CrossEntropyError(np.int64)
test.layer(layer, ins=size, outs=size, err_f=error_f, float_t=np.float64)
test.layer(layer, ins=size, outs=size, err_f=error_f, float_t=np.float32)
if __name__ == '__main__':
test()
|
#!/usr/bin/python
# license:BSD-3-Clause
# copyright-holders:Olivier Galibert
import sys
import re
# Initial state
state = 1
text = ""
dispatch_to_states = { "MAIN" : 0 }
states_to_dispatch = { 0 : "MAIN" }
def load_file(fname, lines):
path = fname.rpartition('/')[0]
if path != "":
path += '/'
try:
f = open(fname, "rU")
except Exception:
err = sys.exc_info()[1]
sys.stderr.write("Cannot read opcodes file %s [%s]\n" % (fname, err))
sys.exit(1)
rawlines = re.split('(\n|; *\n?)', f.read())
count = 0
while count < len(rawlines)-1:
line = rawlines[count+0] + rawlines[count+1]
if line.startswith("#include"):
load_file(path + line.split('"')[1], lines)
else:
lines.append(line)
count += 2
f.close()
# Get lines
lines = []
load_file(sys.argv[1], lines)
count = 0
while count < len(lines):
# Retrieve this line
line = lines[count]
# Retrieve the whitespace
whitespace = line[:len(line) - len(line.lstrip())]
# Check to see if the next line is a return
next_line_is_return = (count + 1 == len(lines)) or lines[count+1].strip() == "return;"
# Check to see if the next line is a dispatch followed by return
next_line_is_dispatch_and_return = (count + 1 < len(lines)) and re.match('([A-Za-z0-9\t ]+\:)*\s*\%', lines[count+1]) and lines[count+2].strip() == "return;"
if re.match('([A-Za-z0-9\t ]+\:)*\s*\%', line):
# This is a dispatch - find the '%'
percent_pos = line.find("%")
dispatch = line[percent_pos+1:].strip("\t\n; ")
# Do we have a label?
label = line[:percent_pos].strip()
if label != "":
text += whitespace + label + "\n"
whitespace += "\t"
# Create the goto command
if dispatch[-1:] == "*":
goto_command = "if (is_register_register_op_16_bit()) goto %s16; else goto %s8;\n" %(dispatch[:-1], dispatch[:-1])
else:
goto_command = "goto %s;\n" % dispatch
# Are we right before a 'return'?
if next_line_is_return:
text += whitespace + goto_command
count += 1 # Skip the return
elif next_line_is_dispatch_and_return:
# We are followed by a dispatch/return combo; identify the next dispatch
percent_pos = lines[count+1].find("%")
next_dispatch = lines[count+1][percent_pos+1:].strip("\t\n; ")
# If there is no state number associated with the next dispatch, make one
if next_dispatch not in dispatch_to_states:
dispatch_to_states[next_dispatch] = state
states_to_dispatch[state] = next_dispatch
state += 1
text += whitespace + "push_state(%s);\t// %s\n" % (dispatch_to_states[next_dispatch], next_dispatch)
text += whitespace + goto_command
count += 2 # Skip the dispatch/return
else:
# Normal dispatch
text += whitespace + "push_state(%s);\n" % state
text += whitespace + goto_command
text += "state_%s:\n" % state
state += 1
else:
# "Normal" code
# Is there an '@' here?
check_icount = line.lstrip().startswith("@")
if check_icount:
line = line.replace("@", "", 1)
# Output the line
text += line
# If we have to decrement the icount, output more info
if check_icount and not next_line_is_return:
text += whitespace + "if (UNEXPECTED(m_icount <= 0)) { push_state(%s); return; }\n" % state
text += "state_%s:\n" % state
state += 1
# Advance to next line
count += 1
# Output the case labels
for i in range(0, state):
print("\tcase %d: goto %s;" % (i, states_to_dispatch.get(i, "state_%d" % i)))
# Output a default case
print("\tdefault:")
print("\t\tfatalerror(\"Unexpected state\");")
print("\t\tbreak;")
print("")
# Finally output the text
print(text)
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
<title>Rgaa30 Test.4-1-3 NMI 01</title>
</head>
<body>
<div>
<h1>Rgaa30 Test.4-1-3 NMI 01</h1>
<div class="test-detail" lang="fr"> Chaque
<a href="http://references.modernisation.gouv.fr/referentiel-technique-0#mMediaTemp">média temporel</a> synchronisé pré-enregistré vérifie-t-il, si nécessaire, une de ces conditions (
<a href="http://references.modernisation.gouv.fr/referentiel-technique-0#cpCrit4-" title="Cas particuliers pour le critère 4.1">hors cas particuliers</a>) ?
<ul class="ssTests">
<li> Il existe une <a href="http://references.modernisation.gouv.fr/referentiel-technique-0#mTranscriptTextuel">transcription textuelle</a> accessible via un <a href="http://references.modernisation.gouv.fr/referentiel-technique-0#mLienAdj">lien adjacent</a> (une <code>url</code> ou une <code>ancre</code>)</li>
<li> Il existe une <a href="http://references.modernisation.gouv.fr/referentiel-technique-0#mTranscriptTextuel">transcription textuelle</a> adjacente clairement identifiable</li>
<li> Il existe une <a href="http://references.modernisation.gouv.fr/referentiel-technique-0#mAudioDesc">audio-description</a> synchronisée</li>
<li> Il existe une version alternative avec une <a href="http://references.modernisation.gouv.fr/referentiel-technique-0#mAudioDesc">audio-description</a> synchronisée accessible via un <a href="http://references.modernisation.gouv.fr/referentiel-technique-0#mLienAdj">lien adjacent</a> (une <code>url</code> ou une <code>ancre</code>)</li>
</ul>
</div>
<div class="testcase">
<video src="url/video.webm" controls autoplay height="300" width="400" ></video>
<audio controls>
<source src="audio.ogg" type="audio/ogg">
<source src="audio.mp3" type="audio/mpeg">
<p>Fallback en cas d'incompatibilité du browser</p>
</audio>
</div>
<div class="test-explanation">
NMI.
</div>
</div>
</body>
</html>
|
#!/usr/bin/env python
# fixpax - fix ownership in bdist_mpkg output
#
# Copyright 2015 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# MIT license (http://opensource.org/licenses/MIT)
"""Set file ownership to 0 in an Archive.pax.gz.
Suitable for fixing files bdist_mpkg output:
*.mpkg/Contents/Packages/*.pkg/Contents/Archive.pax.gz
"""
from __future__ import absolute_import, print_function
import gzip
import os
import sys
def fixpax(iname, oname):
i = gzip.GzipFile(iname)
o = gzip.GzipFile(oname, "w")
while True:
magic = i.read(6)
dev = i.read(6)
ino = i.read(6)
mode = i.read(6)
i.read(6) # uid
i.read(6) # gid
nlink = i.read(6)
rdev = i.read(6)
mtime = i.read(11)
namesize = i.read(6)
filesize = i.read(11)
name = i.read(int(namesize, 8))
data = i.read(int(filesize, 8))
o.write(magic)
o.write(dev)
o.write(ino)
o.write(mode)
o.write("000000")
o.write("000000")
o.write(nlink)
o.write(rdev)
o.write(mtime)
o.write(namesize)
o.write(filesize)
o.write(name)
o.write(data)
if name.startswith("TRAILER!!!"):
o.write(i.read())
break
o.close()
i.close()
if __name__ == '__main__':
for iname in sys.argv[1:]:
print('fixing file ownership in %s' % iname)
        oname = iname + '.tmp'
fixpax(iname, oname)
os.rename(oname, iname)
|
'''
Created on Jan 5, 2016
@author: Dmitry
'''
import os
import sys
from twisted.python import filepath
def spawnProcess(proto, sibling, *args, **kw):
"""
Launch a child Python process and communicate with it using the
given ProcessProtocol.
@param proto: A L{ProcessProtocol} instance which will be connected
to the child process.
@param sibling: The basename of a file containing the Python program
to run in the child process.
@param *args: strings which will be passed to the child process on
the command line as C{argv[2:]}.
@param **kw: additional arguments to pass to L{reactor.spawnProcess}.
@return: The L{IProcessTransport} provider for the spawned process.
"""
from twisted.internet import reactor
import twisted
subenv = dict(os.environ)
subenv['PYTHONPATH'] = os.pathsep.join(
[os.path.abspath(
os.path.dirname(os.path.dirname(twisted.__file__))),
';'.join(sys.path),
])
args = [sys.executable,
filepath.FilePath(__file__).sibling(sibling).path#,
#reactor.__class__.__module__]
]+ list(args)
print(args)
return reactor.spawnProcess(
proto,
sys.executable,
args,
env=subenv,
**kw)
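# Hedged usage sketch (illustrative, not part of the original module): with a minimal
# ProcessProtocol the helper launches a sibling script under the running reactor.
# 'child_script.py' is a hypothetical sibling file.
#
#   from twisted.internet import protocol, reactor
#
#   class Echo(protocol.ProcessProtocol):
#       def outReceived(self, data):
#           print(data)
#       def processEnded(self, reason):
#           reactor.stop()
#
#   spawnProcess(Echo(), 'child_script.py', '--some-flag')
#   reactor.run()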
|
@extends('Layouts.master')
@section('head')
<title>Scholarship Interface Reset Password</title>
<link rel="stylesheet" type="text/css" href="{{asset('/css/Global/ResetPassword/cellphone.css') }}">
@parent
@stop
@section('content')
<div id="cellPhoneTokenInput">
{{ Form::open(array('url' => route('password.reset.doCellPhone', array($token)), 'method' => 'POST',
'accept-charset' => 'UTF-8')) }}
{{ Form::label('cellToken', 'Cell Phone Token') }}
<br>
{{ Form::text('cellToken', '', array('placeholder' => 'Token', 'autocomplete' => 'off')) }}
<br>
<font color="red">{{ $errors -> first('cellToken')}}</font>
<br>
{{ Form::submit('Verify Token', array('class' => 'btn btn-primary')) }}
{{ Form::close() }}
</div>
@stop
|
using GraphQLParser.AST;
namespace GraphQLParser.Visitors;
/// <summary>
/// Context used by <see cref="SDLPrinter{TContext}"/> and <see cref="StructurePrinter{TContext}"/>.
/// </summary>
public interface IPrintContext : IASTVisitorContext
{
/// <summary>
/// A text writer to print document.
/// </summary>
TextWriter Writer { get; }
/// <summary>
/// Stack of AST nodes to track the current visitor position.
/// </summary>
Stack<ASTNode> Parents { get; }
/// <summary>
/// Tracks the current indent level.
/// </summary>
int IndentLevel { get; set; }
/// <summary>
    /// Indicates whether the last GraphQL AST definition node (executable definition,
    /// type system definition or type system extension) from the printed document was
    /// actually printed. This property is required to properly print vertical
    /// indents between definitions.
/// </summary>
bool LastDefinitionPrinted { get; set; }
}
|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
import datetime
import itertools
import string
import factory
from base.models.enums import entity_type, organization_type
from base.tests.factories.entity import EntityFactory
def generate_acronyms():
acronyms_letters_generator = itertools.permutations(string.ascii_uppercase, r=4)
for acronym_letters in acronyms_letters_generator:
yield "".join(acronym_letters)
class EntityVersionFactory(factory.DjangoModelFactory):
class Meta:
model = 'base.EntityVersion'
entity = factory.SubFactory(EntityFactory)
title = factory.Faker('company')
acronym = factory.Iterator(generate_acronyms())
entity_type = factory.Iterator(entity_type.ENTITY_TYPES, getter=lambda c: c[0])
parent = factory.SubFactory(EntityFactory)
start_date = datetime.date(2015, 1, 1).isoformat()
end_date = None
class MainEntityVersionFactory(EntityVersionFactory):
entity = factory.SubFactory(EntityFactory, organization__type=organization_type.MAIN)
entity_type = factory.Iterator(entity_type.PEDAGOGICAL_ENTITY_TYPES)
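# Hedged demo (added for illustration, not part of the original module): the acronym
# generator yields 4-letter uppercase permutations in lexicographic order, which keeps
# factory-built acronyms unique across a test run.
if __name__ == '__main__':
    print(list(itertools.islice(generate_acronyms(), 3)))   # ['ABCD', 'ABCE', 'ABCF']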
|
# -*- coding: utf-8 -*-
#pickle is used to transfer program state data between processes or over the network
#in short, an object is serialized into a string of bytes, transferred, and then restored to its original state from that string
import pickle
d = dict(name = 'bob',age = 20,score = 90)
p = pickle.dumps(d)
print('Content after serialization:')
print(p)
#dumps serializes the dict object
print('Content restored from the serialized data:')
print(pickle.loads(p))
#loads restores the object
f = open('dump.txt','wb')
pickle.dump(d,f)
f.close()
print('Reading the serialized content from the file')
with open('dump.txt','rb') as f:
    f.read()
#a new file is created above and the serialized dict d is stored into it
#note again that open must be given the proper mode for the intended read or write operation
#e.g. wb means writing binary data, so reading is not possible in that mode
#the dict d from the example above is serialized and written into the file
f = open('dump.txt','rb')
d = pickle.load(f)
f.close()
print('Content restored from the serialized file')
print(d)
#read file f and restore the serialized content
#the file itself stays unchanged here, because it is only read, not modified
import json
class Student(object):
    def __init__(self,name,age,score):
        self.name = name
        self.age = age
        self.score = score
s = Student('bob',20,90)
#print(json.dumps(s))
#this fails because json converts dict-like objects, and a class instance is obviously not a dict
print(dir(s))
print(json.dumps(s.__dict__))
f = json.dumps(s.__dict__)
print(json.dumps(s,default = lambda obj:obj.__dict__))
#instances normally have a __dict__ attribute that exposes them as a dict, which is exactly what json needs
print(f)
print(json.loads(f))
#loads turns the serialized f directly back into a dict (printed with single quotes, unlike the JSON string)
#but a dict is still not a proper class instance, so a helper is needed to turn the dict into an instance
def dict2student(d):
    return Student(d['name'],d['age'],d['score'])
#the helper receives a dict and uses d['name'] etc. to pull the values and build a Student instance
print(json.loads(f,object_hook = dict2student))
#loads accepts an object_hook callable that converts the decoded dict into an instance
|
/*
* This file is part of the CMaNGOS Project. See AUTHORS file for Copyright information
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef __OBJECT_VISIBILITY_H
#define __OBJECT_VISIBILITY_H
#include "Common.h"
class WorldObject;
class Unit;
enum class VisibilityDistanceType : uint32
{
Normal = 0,
Tiny = 1,
Small = 2,
Large = 3,
Gigantic = 4,
Infinite = 5,
Max
};
enum StealthType : uint32
{
STEALTH_UNIT = 0,
STEALTH_TRAP = 1,
STEALTH_TYPE_MAX,
};
enum InvisibilityType : uint32
{
INVISIBILITY_TRAP = 3,
INVISIBILITY_DRUNK = 6,
INVISIBILITY_TYPE_MAX = 32,
};
class VisibilityData
{
public:
VisibilityData(WorldObject* owner);
// visibility
bool IsVisibilityOverridden() const { return m_visibilityDistanceOverride != 0.f; }
void SetVisibilityDistanceOverride(VisibilityDistanceType type);
float GetVisibilityDistance() const;
float GetVisibilityDistanceFor(WorldObject* obj) const;
// invisibility
bool CanDetectInvisibilityOf(WorldObject const* u) const;
uint32 GetInvisibilityDetectMask() const;
void SetInvisibilityDetectMask(uint32 index, bool apply);
uint32 GetInvisibilityMask() const;
void SetInvisibilityMask(uint32 index, bool apply);
void SetInvisibilityValue(uint32 index, int32 value) { m_invisibilityValues[index] = value; }
void AddInvisibilityValue(uint32 index, int32 value) { m_invisibilityValues[index] += value; }
void SetInvisibilityDetectValue(uint32 index, int32 value) { m_invisibilityDetectValues[index] = value; }
void AddInvisibilityDetectValue(uint32 index, int32 value) { m_invisibilityDetectValues[index] += value; }
int32 GetInvisibilityValue(uint32 index) const;
int32 GetInvisibilityDetectValue(uint32 index) const;
// stealth
uint32 GetStealthMask() const { return m_stealthMask; }
void SetStealthMask(uint32 index, bool apply);
void AddStealthStrength(StealthType type, uint32 value) { m_stealthStrength[type] += value; }
void AddStealthDetectionStrength(StealthType type, uint32 value) { m_stealthDetectStrength[type] += value; }
uint32 GetStealthStrength(StealthType type) const { return m_stealthStrength[type]; }
uint32 GetStealthDetectionStrength(StealthType type) const { return m_stealthDetectStrength[type]; }
float GetStealthVisibilityDistance(Unit const* target, bool alert = false) const;
private:
// visibility
float m_visibilityDistanceOverride;
// invisibility
uint32 m_invisibilityMask;
uint32 m_detectInvisibilityMask; // is inherited from controller in PC case
int32 m_invisibilityValues[INVISIBILITY_TYPE_MAX];
int32 m_invisibilityDetectValues[INVISIBILITY_TYPE_MAX];
// stealth
uint32 m_stealthMask;
uint32 m_stealthStrength[STEALTH_TYPE_MAX];
uint32 m_stealthDetectStrength[STEALTH_TYPE_MAX];
WorldObject* m_owner;
};
#endif
|
/*
* This file is part of CasADi.
*
* CasADi -- A symbolic framework for dynamic optimization.
* Copyright (C) 2010 by Joel Andersson, Moritz Diehl, K.U.Leuven. All rights reserved.
*
* CasADi is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* CasADi is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with CasADi; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
*/
#include "reshape.hpp"
#include "../stl_vector_tools.hpp"
#include "../matrix/matrix_tools.hpp"
#include "mx_tools.hpp"
#include "../sx/sx_tools.hpp"
#include "../fx/sx_function.hpp"
#include "../matrix/sparsity_tools.hpp"
using namespace std;
namespace CasADi{
Reshape::Reshape(const MX& x, CRSSparsity sp){
casadi_assert(x.size()==sp.size());
setDependencies(x);
setSparsity(sp);
}
Reshape* Reshape::clone() const{
return new Reshape(*this);
}
void Reshape::evaluateD(const DMatrixPtrV& input, DMatrixPtrV& output, const DMatrixPtrVV& fwdSeed, DMatrixPtrVV& fwdSens, const DMatrixPtrVV& adjSeed, DMatrixPtrVV& adjSens){
evaluateGen<double,DMatrixPtrV,DMatrixPtrVV>(input,output,fwdSeed,fwdSens,adjSeed,adjSens);
}
void Reshape::evaluateSX(const SXMatrixPtrV& input, SXMatrixPtrV& output, const SXMatrixPtrVV& fwdSeed, SXMatrixPtrVV& fwdSens, const SXMatrixPtrVV& adjSeed, SXMatrixPtrVV& adjSens){
evaluateGen<SX,SXMatrixPtrV,SXMatrixPtrVV>(input,output,fwdSeed,fwdSens,adjSeed,adjSens);
}
template<typename T, typename MatV, typename MatVV>
void Reshape::evaluateGen(const MatV& input, MatV& output, const MatVV& fwdSeed, MatVV& fwdSens, const MatVV& adjSeed, MatVV& adjSens){
// Quick return if inplace
if(input[0]==output[0]) return;
// Number of derivatives
int nfwd = fwdSens.size();
int nadj = adjSeed.size();
// Nondifferentiated outputs and forward sensitivities
for(int d=-1; d<nfwd; ++d){
vector<T>& res = d==-1 ? output[0]->data() : fwdSens[d][0]->data();
const vector<T>& arg = d==-1 ? input[0]->data() : fwdSeed[d][0]->data();
copy(arg.begin(),arg.end(),res.begin());
}
// Adjoint sensitivities
for(int d=0; d<nadj; ++d){
vector<T>& aseed = adjSeed[d][0]->data();
vector<T>& asens = adjSens[d][0]->data();
transform(asens.begin(),asens.end(),aseed.begin(),asens.begin(),std::plus<T>());
fill(aseed.begin(),aseed.end(),0);
}
}
void Reshape::propagateSparsity(DMatrixPtrV& input, DMatrixPtrV& output, bool fwd){
// Quick return if inplace
if(input[0]==output[0]) return;
bvec_t *res_ptr = get_bvec_t(output[0]->data());
vector<double>& arg = input[0]->data();
bvec_t *arg_ptr = get_bvec_t(arg);
if(fwd){
copy(arg_ptr, arg_ptr+arg.size(), res_ptr);
} else {
for(int k=0; k<arg.size(); ++k){
*arg_ptr++ |= *res_ptr;
*res_ptr++ = 0;
}
}
}
void Reshape::printPart(std::ostream &stream, int part) const{
if(part==0){
stream << "reshape(";
} else {
stream << ")";
}
}
void Reshape::evaluateMX(const MXPtrV& input, MXPtrV& output, const MXPtrVV& fwdSeed, MXPtrVV& fwdSens, const MXPtrVV& adjSeed, MXPtrVV& adjSens, bool output_given){
// Quick return if inplace
if(input[0]==output[0]) return;
if(!output_given){
*output[0] = reshape(*input[0],size1(),size2());
}
// Forward sensitivities
int nfwd = fwdSens.size();
for(int d = 0; d<nfwd; ++d){
*fwdSens[d][0] = reshape(*fwdSeed[d][0],size1(),size2());
}
// Adjoint sensitivities
int nadj = adjSeed.size();
for(int d=0; d<nadj; ++d){
MX& aseed = *adjSeed[d][0];
MX& asens = *adjSens[d][0];
asens += reshape(aseed,dep().size1(),dep().size2());
aseed = MX();
}
}
void Reshape::generateOperation(std::ostream &stream, const std::vector<std::string>& arg, const std::vector<std::string>& res, CodeGenerator& gen) const{
// Quick return if inplace
if(arg[0].compare(res[0])==0) return;
stream << " for(i=0; i<" << size() << "; ++i) " << res.front() << "[i] = " << arg.front() << "[i];" << endl;
}
MX Reshape::getReshape(const CRSSparsity& sp) const{
return reshape(dep(0),sp);
}
} // namespace CasADi
|
// Copyright (C) 2008, 2009, 2011, 2012, 2013 EPITA Research and
// Development Laboratory (LRDE)
//
// This file is part of Olena.
//
// Olena is free software: you can redistribute it and/or modify it under
// the terms of the GNU General Public License as published by the Free
// Software Foundation, version 2 of the License.
//
// Olena is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Olena. If not, see <http://www.gnu.org/licenses/>.
//
// As a special exception, you may use this file as part of a free
// software project without restriction. Specifically, if other files
// instantiate templates or use macros or inline functions from this
// file, or you compile this file and link it with other files to produce
// an executable, this file does not by itself cause the resulting
// executable to be covered by the GNU General Public License. This
// exception does not however invalidate any other reasons why the
// executable file might be covered by the GNU General Public License.
#ifndef MLN_CORE_INTERNAL_SITE_SET_ITERATOR_BASE_HH
# define MLN_CORE_INTERNAL_SITE_SET_ITERATOR_BASE_HH
/// \file
///
/// \brief Base class to factor code for iterator classes directly
/// working on site sets.
# include <mln/core/internal/site_iterator_base.hh>
namespace mln
{
namespace internal
{
/*!
\internal
\brief A base class for iterators on site sets.
Parameter \c S is the targeted site set type.
IMPORTANT: Sub-classes have to define start_, next_,
is_valid_ and invalidate_. They may also define
change_target_.
*/
template <typename S, typename E>
class site_set_iterator_base : public site_iterator_base<S, E>
{
public:
/// The associated site set type.
typedef S pset;
/// Give the site set that this iterator browses.
const S& site_set() const;
/// Change the site set targeted by this iterator.
void change_target(const S& s);
/// \cond INTERNAL_API
/// Hook to the current location.
const mln_psite(S)& p_hook_() const;
/// Part of the change_target specific to the exact iterator
/// type, empty by default (to be overloaded).
void change_target_(const S& s);
/// \endcond
protected:
/// The psite designated by this iterator.
mln_psite(S) p_;
/// Constructor without argument.
site_set_iterator_base();
};
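    /* A minimal, hypothetical subclass sketch (the names my_set and my_iter are
       illustrative only); it lists exactly the members the documentation above
       requires a concrete iterator to provide:

       class my_iter : public site_set_iterator_base<my_set, my_iter>
       {
       public:
         void start_();            // Move to the first site.
         void next_();             // Advance to the next site.
         bool is_valid_() const;   // Tell whether a site is currently designated.
         void invalidate_();       // Put the iterator in an invalid state.
       };
    */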
#ifndef MLN_INCLUDE_ONLY
template <typename S, typename E>
inline
site_set_iterator_base<S, E>::site_set_iterator_base()
{
}
template <typename S, typename E>
inline
const S&
site_set_iterator_base<S, E>::site_set() const
{
mln_precondition(this->s_ != 0);
return *this->s_;
}
template <typename S, typename E>
inline
void
site_set_iterator_base<S, E>::change_target(const S& s)
{
this->s_ = & s;
// p might be also updated since it can hold a pointer towards
// the set it designates, so:
if_possible::change_target(p_, s);
// Likewise, the iterator might need to update specific data.
exact(this)->change_target_(s);
// Last:
this->invalidate();
}
template <typename S, typename E>
inline
void
site_set_iterator_base<S, E>::change_target_(const S& s)
{
(void) s;
// Empty by default.
}
template <typename S, typename E>
inline
const mln_psite(S)&
site_set_iterator_base<S, E>::p_hook_() const
{
return p_;
}
#endif // ! MLN_INCLUDE_ONLY
} // end of namespace internal
} // end of namespace mln
#endif // ! MLN_CORE_INTERNAL_SITE_SET_ITERATOR_BASE_HH
|
# coding=utf-8
"""
.NET (CLR) specific functions
"""
__author__ = 'Ilya.Kazakevich'
def get_namespace_by_name(object_name):
"""
    Gets the namespace for a full object name. The last element of the name may be a class rather than a module:
    for System.Console this returns System, while for System.Web it returns System.Web.
Be sure all required assemblies are loaded (i.e. clr.AddRef.. is called)
:param object_name: name to parse
:return: namespace
"""
(imported_object, object_name) = _import_first(object_name)
parts = object_name.partition(".")
first_part = parts[0]
remain_part = parts[2]
while remain_part and type(_get_attr_by_name(imported_object, remain_part)) is type: # While we are in class
remain_part = remain_part.rpartition(".")[0]
if remain_part:
return first_part + "." + remain_part
else:
return first_part
def _import_first(object_name):
"""
    Sometimes we cannot import a module directly. For example, Some.Class.InnerClass cannot be imported: you need to import "Some.Class"
    or even "Some" instead. This function tries to find the longest part of the name that can be imported
:param object_name: name in dotted notation like "Some.Function.Here"
:return: (imported_object, object_name): tuple with object and its name
"""
while object_name:
try:
return (__import__(object_name, globals=[], locals=[], fromlist=[]), object_name)
except ImportError:
            object_name = object_name.rpartition(".")[0] # Remove rightmost part
raise Exception("No module name found in name " + object_name)
def _get_attr_by_name(obj, name):
"""
Accepts chain of attributes in dot notation like "some.property.name" and gets them on object
    :param obj: object to introspect
:param name: attribute name
:return attribute
>>> str(_get_attr_by_name("A", "__class__.__class__"))
"<type 'type'>"
>>> str(_get_attr_by_name("A", "__class__.__len__.__class__"))
"<type 'method_descriptor'>"
"""
result = obj
parts = name.split('.')
for part in parts:
result = getattr(result, part)
return result
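# Illustrative calls only (they assume a CLR runtime with the relevant assemblies
# loaded, so they are kept as comments rather than executable code):
#
#   get_namespace_by_name("System.Console")   # -> "System"      (Console is a class)
#   get_namespace_by_name("System.Web")       # -> "System.Web"
#   _import_first("Some.Class.InnerClass")    # tries "Some.Class.InnerClass",
#                                             # then "Some.Class", then "Some"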
|
import common
# create admin account
try:
admin_id = common.users.add('hat@headquarters.com', 'adminpass', name='Admin', groups="admin", email_confirmed='1')
except KeyError:
admin = common.users.get('hat@headquarters.com', 'adminpass')
admin_id = admin.id
# create app owner account
try:
boss_id = common.users.add('owner@company.com', 'bosspass', name='Pointy Haired Boss', email_confirmed='1')
except KeyError:
boss = common.users.get('owner@company.com', 'bosspass')
boss_id = boss.id
# create user account (who will have access to the app)
try:
user_id = common.users.add('user@customer.com', 'secretpass', name='Bob the Customer', email_confirmed='1')
except KeyError:
user = common.users.get('user@customer.com', 'secretpass')
user_id = user.id
# create user account (who will NOT have access to the app)
try:
other_user_id = common.users.add('john@doe.com', '123abc', name='John Doe', email_confirmed='1')
except KeyError:
other_user = common.users.get('john@doe.com', '123abc')
other_user_id = other_user.id
# Add an application "Widget Builder"
app = common.applications.get_all_by_owner(boss_id)
if len(app) > 0:
app_id = app[0].app_id
else:
app_id = common.applications.add(
name='WidgetBuilder',
owner_id=boss_id,
scopes=['basic', 'admin'],
redirect_uris=['https://app.local:8080/private', 'https://app.local:8080/login'],
default_redirect_uri='https://app.local:8080/private')
# Add a subscription by Bob the Customer to Widget Builder
try:
common.subscriptions.add(app_id=app_id,
user_id=user_id,
subscription_type='Basic')
except KeyError:
pass
|
'''This module contains some glue code encapsulating a "main" process.
The code here wraps the most common tasks involved in creating and, especially,
training a neural network model.
'''
import climate
import os
from . import graph
logging = climate.get_logger(__name__)
class Experiment:
'''This class encapsulates tasks for training and evaluating a network.
Parameters
----------
    network : :class:`Network <theanets.graph.Network>` or str
A specification for obtaining a model. If a string is given, it is
assumed to name a file containing a pickled model; this file will be
loaded and used. If a network instance is provided, it will be used
as the model. If a callable (such as a subclass) is provided, it
will be invoked using the provided keyword arguments to create a
network instance.
'''
def __init__(self, network, *args, **kwargs):
if isinstance(network, str) and os.path.isfile(network):
self.load(network)
elif isinstance(network, graph.Network):
self.network = network
else:
assert network is not graph.Network, \
'use a concrete theanets.Network subclass ' \
'like theanets.{Autoencoder,Regressor,...}'
self.network = network(*args, **kwargs)
def train(self, *args, **kwargs):
'''Train the network until the trainer converges.
All arguments are passed to :func:`train
<theanets.graph.Network.itertrain>`.
Returns
-------
training : dict
A dictionary of monitor values computed using the training dataset,
at the conclusion of training. This dictionary will at least contain
a 'loss' key that indicates the value of the loss function. Other
keys may be available depending on the trainer being used.
validation : dict
A dictionary of monitor values computed using the validation
dataset, at the conclusion of training.
'''
return self.network.train(*args, **kwargs)
def itertrain(self, *args, **kwargs):
'''Train the network iteratively.
All arguments are passed to :func:`itertrain
<theanets.graph.Network.itertrain>`.
Yields
------
training : dict
A dictionary of monitor values computed using the training dataset,
at the conclusion of training. This dictionary will at least contain
a 'loss' key that indicates the value of the loss function. Other
keys may be available depending on the trainer being used.
validation : dict
A dictionary of monitor values computed using the validation
dataset, at the conclusion of training.
'''
return self.network.itertrain(*args, **kwargs)
def save(self, path):
'''Save the current network to a pickle file on disk.
Parameters
----------
path : str
Location of the file to save the network.
'''
self.network.save(path)
def load(self, path):
'''Load a saved network from a pickle file on disk.
This method sets the ``network`` attribute of the experiment to the
loaded network model.
Parameters
----------
filename : str
Load the keyword arguments and parameters of a network from a pickle
file at the named path. If this name ends in ".gz" then the input
will automatically be gunzipped; otherwise the input will be treated
as a "raw" pickle.
Returns
-------
network : :class:`Network <graph.Network>`
A newly-constructed network, with topology and parameters loaded
from the given pickle file.
'''
self.network = graph.Network.load(path)
return self.network
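# A minimal usage sketch (illustrative only: it assumes a concrete subclass such as
# theanets.Regressor and pre-built train_data/valid_data arrays):
#
#   exp = Experiment(theanets.Regressor, layers=(10, 20, 3))
#   exp.train(train_data, valid_data)
#   exp.save('model.pkl.gz')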
|
# coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The ImageNet-R image classification dataset."""
import os
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
_CITATION = r"""
@article{hendrycks2020many,
title={The Many Faces of Robustness: A Critical Analysis of Out-of-Distribution Generalization},
author={Dan Hendrycks and Steven Basart and Norman Mu and Saurav Kadavath and Frank Wang and Evan Dorundo and Rahul Desai and Tyler Zhu and Samyak Parajuli and Mike Guo and Dawn Song and Jacob Steinhardt and Justin Gilmer},
journal={arXiv preprint arXiv:2006.16241},
year={2020}
}
"""
_DESCRIPTION = """
ImageNet-R is a set of images labelled with ImageNet labels that were obtained
by collecting art, cartoons, deviantart, graffiti, embroidery, graphics,
origami, paintings, patterns, plastic objects, plush objects, sculptures,
sketches, tattoos, toys, and video game renditions of ImageNet classes.
ImageNet-R has renditions of 200 ImageNet classes, resulting in 30,000 images.
For more details please refer to the paper.
The label space is the same as that of ImageNet2012. Each example is
represented as a dictionary with the following keys:
* 'image': The image, a (H, W, 3)-tensor.
* 'label': An integer in the range [0, 1000).
* 'file_name': A unique string identifying the example within the dataset.
"""
_IMAGENET_LABELS_FILENAME = r'image_classification/imagenet2012_labels.txt'
_IMAGENET_R_URL = r'https://people.eecs.berkeley.edu/~hendrycks/imagenet-r.tar'
class ImagenetR(tfds.core.GeneratorBasedBuilder):
"""ImageNet object renditions with ImageNet labels."""
VERSION = tfds.core.Version('0.2.0')
SUPPORTED_VERSIONS = [
tfds.core.Version('0.1.0'),
]
RELEASE_NOTES = {
'0.2.0': ('Fix file_name, from absolute path to path relative to '
'imagenet-r directory, ie: "imagenet_synset_id/filename.jpg".')
}
def _info(self):
names_file = tfds.core.tfds_path(_IMAGENET_LABELS_FILENAME)
return tfds.core.DatasetInfo(
builder=self,
# This is the description that will appear on the datasets page.
description=_DESCRIPTION,
# tfds.features.FeatureConnectors
features=tfds.features.FeaturesDict({
'image': tfds.features.Image(encoding_format='jpeg'),
'label': tfds.features.ClassLabel(names_file=names_file),
'file_name': tfds.features.Text(),
}),
# Used if as_supervised=True in # builder.as_dataset.
supervised_keys=('image', 'label'),
# Homepage of the dataset for documentation
homepage='https://github.com/hendrycks/imagenet-r',
citation=_CITATION,
)
def _split_generators(self, dl_manager):
"""Returns a SplitGenerator for the test set."""
imagenet_r_root = os.path.join(
dl_manager.download_and_extract(_IMAGENET_R_URL), 'imagenet-r')
return [
tfds.core.SplitGenerator(
# The dataset provides only a test split.
name=tfds.Split.TEST,
# These kwargs will be passed to _generate_examples
gen_kwargs={'imagenet_r_root': imagenet_r_root},
),
]
def _generate_examples(self, imagenet_r_root):
"""Yields the examples."""
# The directory structure is `imagenet-r/imagenet_synset_id/filename.jpg`.
for class_synset in tf.io.gfile.listdir(imagenet_r_root):
class_dir = os.path.join(imagenet_r_root, class_synset)
if not tf.io.gfile.isdir(class_dir):
continue
for image_filename in tf.io.gfile.listdir(class_dir):
image_path = os.path.join(class_dir, image_filename)
features = {
'image': image_path,
'label': class_synset,
'file_name': os.path.join(class_synset, image_filename),
}
yield f'{class_synset}_{image_filename}', features
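# A minimal loading sketch (illustrative only; it assumes this builder is registered
# under the dataset name 'imagenet_r'):
#
#   ds = tfds.load('imagenet_r', split='test', as_supervised=True)
#   for image, label in ds.take(1):
#     print(image.shape, label.numpy())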
|
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
<title>windows::basic_handle::basic_handle</title>
<link rel="stylesheet" href="../../../../../doc/src/boostbook.css" type="text/css">
<meta name="generator" content="DocBook XSL Stylesheets V1.78.1">
<link rel="home" href="../../../boost_asio.html" title="Boost.Asio">
<link rel="up" href="../windows__basic_handle.html" title="windows::basic_handle">
<link rel="prev" href="assign/overload2.html" title="windows::basic_handle::assign (2 of 2 overloads)">
<link rel="next" href="basic_handle/overload1.html" title="windows::basic_handle::basic_handle (1 of 3 overloads)">
</head>
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
<table cellpadding="2" width="100%"><tr>
<td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../boost.png"></td>
<td align="center"><a href="../../../../../index.html">Home</a></td>
<td align="center"><a href="../../../../../libs/libraries.htm">Libraries</a></td>
<td align="center"><a href="http://www.boost.org/users/people.html">People</a></td>
<td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td>
<td align="center"><a href="../../../../../more/index.htm">More</a></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="assign/overload2.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../windows__basic_handle.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="basic_handle/overload1.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
<div class="section">
<div class="titlepage"><div><div><h4 class="title">
<a name="boost_asio.reference.windows__basic_handle.basic_handle"></a><a class="link" href="basic_handle.html" title="windows::basic_handle::basic_handle">windows::basic_handle::basic_handle</a>
</h4></div></div></div>
<p>
<a class="indexterm" name="idp171196016"></a>
Construct a <a class="link" href="../windows__basic_handle.html" title="windows::basic_handle"><code class="computeroutput"><span class="identifier">windows</span><span class="special">::</span><span class="identifier">basic_handle</span></code></a> without opening it.
</p>
<pre class="programlisting"><span class="keyword">explicit</span> <a class="link" href="basic_handle/overload1.html" title="windows::basic_handle::basic_handle (1 of 3 overloads)">basic_handle</a><span class="special">(</span>
<span class="identifier">boost</span><span class="special">::</span><span class="identifier">asio</span><span class="special">::</span><span class="identifier">io_service</span> <span class="special">&</span> <span class="identifier">io_service</span><span class="special">);</span>
<span class="emphasis"><em>» <a class="link" href="basic_handle/overload1.html" title="windows::basic_handle::basic_handle (1 of 3 overloads)">more...</a></em></span>
</pre>
<p>
Construct a <a class="link" href="../windows__basic_handle.html" title="windows::basic_handle"><code class="computeroutput"><span class="identifier">windows</span><span class="special">::</span><span class="identifier">basic_handle</span></code></a> on an existing native
handle.
</p>
<pre class="programlisting"><a class="link" href="basic_handle/overload2.html" title="windows::basic_handle::basic_handle (2 of 3 overloads)">basic_handle</a><span class="special">(</span>
<span class="identifier">boost</span><span class="special">::</span><span class="identifier">asio</span><span class="special">::</span><span class="identifier">io_service</span> <span class="special">&</span> <span class="identifier">io_service</span><span class="special">,</span>
<span class="keyword">const</span> <span class="identifier">native_handle_type</span> <span class="special">&</span> <span class="identifier">handle</span><span class="special">);</span>
<span class="emphasis"><em>» <a class="link" href="basic_handle/overload2.html" title="windows::basic_handle::basic_handle (2 of 3 overloads)">more...</a></em></span>
</pre>
<p>
Move-construct a <a class="link" href="../windows__basic_handle.html" title="windows::basic_handle"><code class="computeroutput"><span class="identifier">windows</span><span class="special">::</span><span class="identifier">basic_handle</span></code></a> from another.
</p>
<pre class="programlisting"><a class="link" href="basic_handle/overload3.html" title="windows::basic_handle::basic_handle (3 of 3 overloads)">basic_handle</a><span class="special">(</span>
<span class="identifier">basic_handle</span> <span class="special">&&</span> <span class="identifier">other</span><span class="special">);</span>
<span class="emphasis"><em>» <a class="link" href="basic_handle/overload3.html" title="windows::basic_handle::basic_handle (3 of 3 overloads)">more...</a></em></span>
</pre>
</div>
<table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr>
<td align="left"></td>
<td align="right"><div class="copyright-footer">Copyright © 2003-2015 Christopher M.
Kohlhoff<p>
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)
</p>
</div></td>
</tr></table>
<hr>
<div class="spirit-nav">
<a accesskey="p" href="assign/overload2.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../windows__basic_handle.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="basic_handle/overload1.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a>
</div>
</body>
</html>
|
# -*- encoding: utf-8 -*-
import os
import click
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings
from strephit.web_sources_corpus.preprocess_corpus import preprocess_corpus
from strephit.web_sources_corpus import run_all, archive_org
@click.command()
@click.argument('spider-name', nargs=-1, required=True)
@click.argument('results-dir', type=click.Path(resolve_path=True, file_okay=False))
def crawl(spider_name, results_dir):
""" Run one or more spiders """
settings = get_project_settings()
# prevent scrapy from configuring its own logging, since we already have it
settings.set('LOG_ENABLED', False)
process = CrawlerProcess(settings)
for s in spider_name:
process.settings.set('FEED_URI',
'file://%s.jsonlines' % os.path.join(results_dir, s))
process.settings.set('FEED_FORMAT', 'jsonlines')
spider = process.spider_loader.load(s)
process.crawl(spider)
process.start()
CLI_COMMANDS = {
'preprocess_corpus': preprocess_corpus,
'run_all': run_all.main,
'scrapy_crawl': crawl,
'archive_org_crawl': archive_org.cli,
}
@click.group(name='web_sources_corpus', commands=CLI_COMMANDS)
@click.pass_context
def cli(ctx):
""" Corpus retrieval from the web """
pass
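# Example invocation (the entry-point name and paths are assumptions, shown only to
# illustrate the argument order expected by the crawl command):
#
#   python -m strephit web_sources_corpus scrapy_crawl some_spider /tmp/corpus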
|
"""Filename matching with shell patterns.
fnmatch(FILENAME, PATTERN) matches according to the local convention.
fnmatchcase(FILENAME, PATTERN) always takes case in account.
The functions operate by translating the pattern into a regular
expression. They cache the compiled regular expressions for speed.
The function translate(PATTERN) returns a regular expression
corresponding to PATTERN. (It does not compile it.)
"""
import os
import posixpath
import re
try:
from functools import lru_cache
except ImportError:
from .compat import lru_cache
__all__ = ["filter", "fnmatch", "fnmatchcase", "translate"]
def fnmatch(name, pat):
"""Test whether FILENAME matches PATTERN.
Patterns are Unix shell style:
* matches everything
? matches any single character
[seq] matches any character in seq
[!seq] matches any char not in seq
An initial period in FILENAME is not special.
Both FILENAME and PATTERN are first case-normalized
if the operating system requires it.
If you don't want this, use fnmatchcase(FILENAME, PATTERN).
"""
name = os.path.normcase(name)
pat = os.path.normcase(pat)
return fnmatchcase(name, pat)
@lru_cache(maxsize=256, typed=True)
def _compile_pattern(pat):
if isinstance(pat, bytes):
pat_str = pat.decode('ISO-8859-1')
res_str = translate(pat_str)
res = res_str.encode('ISO-8859-1')
else:
res = translate(pat)
return re.compile(res).match
def filter(names, pat):
"""Return the subset of the list NAMES that match PAT."""
result = []
pat = os.path.normcase(pat)
match = _compile_pattern(pat)
if os.path is posixpath:
# normcase on posix is NOP. Optimize it away from the loop.
for name in names:
m = match(name)
if m:
result.append((name, m.groups()))
else:
for name in names:
m = match(os.path.normcase(name))
if m:
result.append((name, m.groups()))
return result
def fnmatchcase(name, pat):
"""Test whether FILENAME matches PATTERN, including case.
This is a version of fnmatch() which doesn't case-normalize
its arguments.
"""
match = _compile_pattern(pat)
return match(name) is not None
def translate(pat):
"""Translate a shell PATTERN to a regular expression.
There is no way to quote meta-characters.
"""
i, n = 0, len(pat)
res = ''
while i < n:
c = pat[i]
i = i+1
if c == '*':
res = res + '(.*)'
elif c == '?':
res = res + '(.)'
elif c == '[':
j = i
if j < n and pat[j] == '!':
j = j+1
if j < n and pat[j] == ']':
j = j+1
while j < n and pat[j] != ']':
j = j+1
if j >= n:
res = res + '\\['
else:
stuff = pat[i:j].replace('\\','\\\\')
i = j+1
if stuff[0] == '!':
stuff = '^' + stuff[1:]
elif stuff[0] == '^':
stuff = '\\' + stuff
res = '%s([%s])' % (res, stuff)
else:
res = res + re.escape(c)
return res + '\Z(?ms)'
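# Examples of this variant's behaviour (wildcards are wrapped in regex groups, so
# filter() returns (name, groups) tuples rather than bare names):
#
#   translate('*.py')                  # -> r'(.*)\.py\Z(?ms)'
#   fnmatch('script.py', '*.py')       # -> True
#   filter(['a.py', 'b.txt'], '*.py')  # -> [('a.py', ('a',))]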
|
<?php
/*
* (c) Jean-François Lépine <https://twitter.com/Halleck45>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Hal\MaintenabilityIndex;
/**
 * Calculates the Maintainability Index
*
* According to Wikipedia, "Maintainability Index is a software metric which measures how maintainable (easy to
* support and change) the source code is. The maintainability index is calculated as a factored formula consisting
* of Lines Of Code, Cyclomatic Complexity and Halstead volume."
*
* @author Jean-François Lépine <https://twitter.com/Halleck45>
*/
class MaintenabilityIndex {
/**
 * Calculates the Maintainability Index
*
* @param \Hal\Halstead\Result $rHalstead
* @param \Hal\Loc\Result $rLoc
* @return Result
*/
public function calculate(\Hal\Halstead\Result $rHalstead, \Hal\Loc\Result $rLoc)
{
$result = new Result;
$result->setMaintenabilityIndex(
171
- (5.2 * \log($rHalstead->getEffort()))
- (0.23 * \log($rLoc->getComplexityCyclomatic()))
- (16.2 * \log($rLoc->getLogicalLoc()))
);
return $result;
}
}
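/*
 * Minimal usage sketch (illustrative only; it assumes the Result object exposes a
 * getMaintenabilityIndex() getter mirroring the setter called above):
 *
 *   $index  = new MaintenabilityIndex();
 *   $result = $index->calculate($halsteadResult, $locResult);
 *   echo $result->getMaintenabilityIndex();
 */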
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.identity import identity_service
from openstack import resource
class Trust(resource.Resource):
resource_key = 'trust'
resources_key = 'trusts'
base_path = '/OS-TRUST/trusts'
service = identity_service.IdentityService()
# capabilities
allow_create = True
allow_get = True
allow_delete = True
allow_list = True
_query_mapping = resource.QueryParameters(
'trustor_user_id', 'trustee_user_id')
# Properties
#: A boolean indicating whether the trust can be issued by the trustee as
    #: a regular trust. Default is ``False``.
allow_redelegation = resource.Body('allow_redelegation', type=bool)
#: Specifies the expiration time of the trust. A trust may be revoked
#: ahead of expiration. If the value represents a time in the past,
#: the trust is deactivated.
expires_at = resource.Body('expires_at')
#: If ``impersonation`` is set to true, then the ``user`` attribute
#: of tokens that are generated based on the trust will represent
#: that of the trustor rather than the trustee, thus allowing the trustee
#: to impersonate the trustor.
#: If ``impersonation`` is set to ``False``, then the token's ``user``
#: attribute will represent that of the trustee. *Type: bool*
is_impersonation = resource.Body('impersonation', type=bool)
#: Links for the trust resource.
links = resource.Body('links')
#: ID of the project upon which the trustor is
#: delegating authorization. *Type: string*
project_id = resource.Body('project_id')
#: A role links object that includes 'next', 'previous', and self links
#: for roles.
role_links = resource.Body('role_links')
#: Specifies the subset of the trustor's roles on the ``project_id``
#: to be granted to the trustee when the token in consumed. The
#: trustor must already be granted these roles in the project referenced
#: by the ``project_id`` attribute. *Type: list*
roles = resource.Body('roles')
#: Returned with redelegated trust provides information about the
#: predecessor in the trust chain.
redelegated_trust_id = resource.Body('redelegated_trust_id')
#: Redelegation count
redelegation_count = resource.Body('redelegation_count')
#: How many times the trust can be used to obtain a token. The value is
#: decreased each time a token is issued through the trust. Once it
    #: reaches zero, no further tokens will be issued through the trust.
remaining_uses = resource.Body('remaining_uses')
#: Represents the user ID who is capable of consuming the trust.
#: *Type: string*
trustee_user_id = resource.Body('trustee_user_id')
    #: Represents the user ID who created the trust, and whose authorization is
#: being delegated. *Type: string*
trustor_user_id = resource.Body('trustor_user_id')
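# A minimal construction sketch (illustrative only; it assumes the base Resource
# class accepts the attribute names defined above as keyword arguments, and the
# IDs are placeholders):
#
#   trust = Trust(trustor_user_id='<trustor-id>', trustee_user_id='<trustee-id>',
#                 project_id='<project-id>', is_impersonation=True,
#                 roles=[{'name': 'member'}])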
|
#!/usr/bin/env python
# encoding: utf-8
"""
test.py
Created by Maximillian Dornseif on 2010-10-24.
Copyright (c) 2010 HUDORA. All rights reserved.
"""
import unittest
from huTools.http import fetch
class testTests(unittest.TestCase):
def test_fetch_get_basic(self):
status, header, body = fetch('http://www.google.com')
assert status == 200
status, header, body = fetch('http://www.postbin.org/186ndf2', {'q': 'hudora'})
assert status == 200
status, header, body = fetch('http://www.postbin.org/186ndf2', {'keyg': 'value', 'just a test': 212})
assert status == 200
def test_fetch_get_unicode(self):
status, header, body = fetch('http://www.postbin.org/186ndf2',
{'kühg1': 'Iñtërnâtiônàlizætiøn'})
assert status == 200
status, header, body = fetch('http://www.postbin.org/186ndf2',
{'kühg2': u'Iñtërnâtiônàlizætiøn'})
assert status == 200
status, header, body = fetch('http://www.postbin.org/186ndf2',
{u'kühg3': 'Iñtërnâtiônàlizætiøn'})
assert status == 200
def test_fetch_post_basic(self):
status, header, body = fetch('http://www.postbin.org/186ndf2',
{'keyp1': 'value', 'just a test': 212}, 'POST')
assert status == 201
def test_fetch_post_file(self):
status, header, body = fetch('http://www.postbin.org/186ndf2',
{'hosts': open('/etc/hosts', 'r')}, 'POST')
status, header, body = fetch('http://www.postbin.org/186ndf2',
{'hosts': open('/etc/hosts', 'r'),
'unicode': u'Iñtërnâtiônàlizætiøn'}, 'POST')
assert status == 201
def test_fetch_post_unicode(self):
status, header, body = fetch('http://www.postbin.org/186ndf2',
{'kuehp1': 'Iñtërnâtiônàlizætiøn'}, 'POST')
assert status == 201
status, header, body = fetch('http://www.postbin.org/186ndf2',
{'kühp2': 'Iñtërnâtiônàlizætiøn'}, 'POST')
assert status == 201
status, header, body = fetch('http://www.postbin.org/186ndf2',
{'kühp3': u'Iñtërnâtiônàlizætiøn'}, 'POST')
assert status == 201
status, header, body = fetch('http://www.postbin.org/186ndf2',
{u'kühp4': u'Iñtërnâtiônàlizætiøn'}, 'POST')
assert status == 201
if __name__ == '__main__':
unittest.main()
|
/*
* Copyright (c) 2021, Peter Abeles. All Rights Reserved.
*
* This file is part of Efficient Java Matrix Library (EJML).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ejml.dense.row.decompose.lu;
import org.ejml.EjmlStandardJUnit;
import org.ejml.UtilEjml;
import org.ejml.data.Complex_F64;
import org.ejml.data.ZMatrixRMaj;
import org.ejml.dense.row.CommonOps_ZDRM;
import org.ejml.dense.row.RandomMatrices_ZDRM;
import org.ejml.ops.ComplexMath_F64;
import org.junit.jupiter.api.Test;
import java.util.Random;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class TestLUDecompositionBase_ZDRM extends EjmlStandardJUnit {
/**
* Compare the determinant computed from LU to the value computed from the minor
* matrix method.
*/
@Test
public void testDeterminant()
{
Random rand = new Random(0xfff);
int width = 10;
ZMatrixRMaj LU = RandomMatrices_ZDRM.rectangle(width,width,-1,1,rand);
Complex_F64 expected = new Complex_F64(1,0);
Complex_F64 a = new Complex_F64();
Complex_F64 tmp = new Complex_F64();
for (int i = 0; i < width; i++) {
LU.get(i, i, a);
ComplexMath_F64.multiply(expected,a,tmp);
expected.setTo(tmp);
}
DebugDecompose alg = new DebugDecompose(width);
alg.decomposeCommonInit(LU);
for( int i = 0; i < width; i++ ) alg.getIndx()[i] = i;
alg.setLU(LU);
Complex_F64 found = alg.computeDeterminant();
assertEquals(expected.real,found.real, UtilEjml.TEST_F64);
assertEquals(expected.imaginary,found.imaginary,UtilEjml.TEST_F64);
}
@Test
public void _solveVectorInternal() {
int width = 10;
ZMatrixRMaj LU = RandomMatrices_ZDRM.rectangle(width, width,-1,1, rand);
ZMatrixRMaj L = new ZMatrixRMaj(width,width);
ZMatrixRMaj U = new ZMatrixRMaj(width,width);
for (int i = 0; i < width; i++) {
for (int j = 0; j < width; j++) {
double real = LU.getReal(i,j);
double imag = LU.getImag(i, j);
if( j <= i ) {
if( j == i )
L.set(i,j,1,0);
else
L.set(i,j,real,imag);
}
if( i <= j ) {
U.set(i,j,real,imag);
}
}
}
ZMatrixRMaj x = RandomMatrices_ZDRM.rectangle(width, 1,-1,1, rand);
ZMatrixRMaj tmp = new ZMatrixRMaj(width,1);
ZMatrixRMaj b = new ZMatrixRMaj(width,1);
CommonOps_ZDRM.mult(U, x, tmp);
CommonOps_ZDRM.mult(L, tmp, b);
DebugDecompose alg = new DebugDecompose(width);
alg.decomposeCommonInit(LU);
for( int i = 0; i < width; i++ ) alg.getIndx()[i] = i;
alg.setLU(LU);
alg._solveVectorInternal(b.data);
for( int i = 0; i < width; i++ ) {
assertEquals(x.data[i],b.data[i],UtilEjml.TEST_F64);
}
}
@Test
public void solveL() {
int width = 10;
ZMatrixRMaj LU = RandomMatrices_ZDRM.rectangle(width, width,-1,1, rand);
ZMatrixRMaj L = new ZMatrixRMaj(width,width);
for (int i = 0; i < width; i++) {
for (int j = 0; j < width; j++) {
double real = LU.getReal(i,j);
double imag = LU.getImag(i, j);
if( j <= i ) {
if( j == i )
L.set(i,j,1,0);
else
L.set(i,j,real,imag);
}
}
}
ZMatrixRMaj x = RandomMatrices_ZDRM.rectangle(width, 1,-1,1, rand);
ZMatrixRMaj b = new ZMatrixRMaj(width,1);
CommonOps_ZDRM.mult(L, x, b);
DebugDecompose alg = new DebugDecompose(width);
alg.decomposeCommonInit(LU);
for( int i = 0; i < width; i++ ) alg.getIndx()[i] = i;
alg.setLU(LU);
alg.solveL(b.data);
for( int i = 0; i < width; i++ ) {
assertEquals(x.data[i],b.data[i],UtilEjml.TEST_F64);
}
}
private static class DebugDecompose extends LUDecompositionBase_ZDRM
{
public DebugDecompose(int width) {
setExpectedMaxSize(width, width);
m = n = width;
}
void setLU( ZMatrixRMaj LU ) {
this.LU = LU;
this.dataLU = LU.data;
}
@Override
public boolean decompose(ZMatrixRMaj orig) {
return false;
}
}
}
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
#
# Author: Joe H. Rahme <joe.hakim.rahme@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.object_storage import base
from tempest import clients
from tempest.common import custom_matchers
from tempest.test import attr
from tempest.test import HTTP_SUCCESS
class HealthcheckTest(base.BaseObjectTest):
@classmethod
def setUpClass(cls):
super(HealthcheckTest, cls).setUpClass()
# creates a test user. The test user will set its base_url to the Swift
# endpoint and test the healthcheck feature.
cls.data.setup_test_user()
cls.os_test_user = clients.Manager(
cls.data.test_user,
cls.data.test_password,
cls.data.test_tenant)
@classmethod
def tearDownClass(cls):
cls.data.teardown_all()
super(HealthcheckTest, cls).tearDownClass()
def setUp(self):
super(HealthcheckTest, self).setUp()
client = self.os_test_user.account_client
client._set_auth()
# Turning http://.../v1/foobar into http://.../
client.base_url = "/".join(client.base_url.split("/")[:-2])
def tearDown(self):
# clear the base_url for subsequent requests
self.os_test_user.account_client.base_url = None
super(HealthcheckTest, self).tearDown()
@attr('gate')
def test_get_healthcheck(self):
resp, _ = self.os_test_user.account_client.get("healthcheck", {})
# The status is expected to be 200
self.assertIn(int(resp['status']), HTTP_SUCCESS)
# The target of the request is not any Swift resource. Therefore, the
# existence of response header is checked without a custom matcher.
self.assertIn('content-length', resp)
self.assertIn('content-type', resp)
self.assertIn('x-trans-id', resp)
self.assertIn('date', resp)
# Check only the format of common headers with custom matcher
self.assertThat(resp, custom_matchers.AreAllWellFormatted())
|
package com.android.intro.movieslist.adpater;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.TextView;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import uk.co.senab.bitmapcache.CacheableBitmapDrawable;
import com.android.intro.custorm.imageview.NetworkedCacheableImageView;
import com.android.intro.moiveslist.*;
public class MovieAdapter extends BaseAdapter {
private final ArrayList<String> movicesUrl;
private final Context mContext;
public MovieAdapter(Context context, ArrayList<String> url) {
movicesUrl = url;
mContext = context;
}
public void appendToDataSet(ArrayList<String> url){
movicesUrl.addAll(url);
}
@Override
public int getCount() {
return null != movicesUrl ? movicesUrl.size() : 0;
}
@Override
public String getItem(int position) {
return movicesUrl.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
if (null == convertView) {
convertView = LayoutInflater.from(mContext)
.inflate(R.layout.gridview_item_layout, parent, false);
}
NetworkedCacheableImageView imageView = (NetworkedCacheableImageView) convertView
.findViewById(R.id.nciv_pug);
TextView status = (TextView) convertView.findViewById(R.id.tv_status);
final boolean fromCache = imageView
.loadImage(movicesUrl.get(position), false, new UpdateTextViewListener(status));
if (fromCache) {
status.setText("From Memory Cache");
status.setBackgroundColor(mContext.getResources().getColor(R.color.translucent_green));
} else {
status.setText("Loading...");
status.setBackgroundColor(mContext.getResources().getColor(R.color.translucent_green));
}
return convertView;
}
static class UpdateTextViewListener
implements NetworkedCacheableImageView.OnImageLoadedListener {
private final WeakReference<TextView> mTextViewRef;
public UpdateTextViewListener(TextView tv) {
mTextViewRef = new WeakReference<TextView>(tv);
}
@Override
public void onImageLoaded(CacheableBitmapDrawable result) {
final TextView tv = mTextViewRef.get();
if (tv == null) {
return;
}
if (result == null) {
tv.setText("Failed");
tv.setBackgroundDrawable(null);
return;
}
switch (result.getSource()) {
case CacheableBitmapDrawable.SOURCE_UNKNOWN:
case CacheableBitmapDrawable.SOURCE_NEW:
tv.setText("From Disk/Network");
tv.setBackgroundColor(tv.getResources().getColor(R.color.translucent_red));
break;
case CacheableBitmapDrawable.SOURCE_INBITMAP:
tv.setText("Reused Bitmap");
tv.setBackgroundColor(tv.getResources().getColor(R.color.translucent_blue));
break;
}
}
}
}
|
# -*- coding: utf-8 -*-
from mock import Mock
from django.utils import unittest
from django.conf import settings
from transifex.resources.formats.registry import registry, _FormatsRegistry
from transifex.resources.formats.pofile import POHandler, POTHandler
from transifex.txcommon.tests.base import BaseTestCase
class TestRegistry(BaseTestCase):
def setUp(self):
super(TestRegistry, self).setUp()
methods = {
'PO': {
'description': 'PO file handler',
'file-extensions': '.po, .pot',
'mimetype': 'text/x-po, application/x-gettext, application/x-po',
}, 'QT': {
'description': 'Qt Files',
'mimetype': 'application/xml',
'file-extensions': '.ts'
},
}
handlers = {
'PO': 'resources.formats.pofile.POHandler',
'QT': 'resources.formats.qt.LinguistHandler',
}
self.registry = _FormatsRegistry(methods=methods, handlers=handlers)
def test_register(self):
from transifex.resources.formats.joomla import JoomlaINIHandler
self.registry.add_handler('INI', JoomlaINIHandler)
self.assertEquals(len(self.registry.handlers.keys()), 3)
self.assertIn('INI', self.registry.handlers.keys())
j = self.registry.handler_for('INI')
self.assertIsInstance(j, JoomlaINIHandler)
def test_extensions(self):
extensions = self.registry.extensions_for('PO')
self.assertEquals(len(extensions), 2)
self.assertEquals(extensions[0], '.po')
self.assertEquals(extensions[1], '.pot')
def test_mimetypes(self):
mimetypes = self.registry.mimetypes_for('PO')
self.assertEquals(len(mimetypes), 3)
self.assertEquals(mimetypes[0], 'text/x-po')
self.assertEquals(mimetypes[1], 'application/x-gettext')
self.assertEquals(mimetypes[2], 'application/x-po')
class TestAppropriateHandler(unittest.TestCase):
"""Test the process of finding the appropriate handler in
various situations.
"""
@classmethod
def setUpClass(cls):
cls.appropriate_handler = registry.appropriate_handler
def test_normal_types(self):
for method in settings.I18N_METHODS:
if method not in ('PO', 'POT', ):
resource = Mock()
resource.__dict__['i18n_type'] = method
handler = self.appropriate_handler(resource, None)
self.assertIsInstance(
handler, type(registry.handler_for(method))
)
def test_get(self):
resource = Mock()
resource.__dict__['i18n_type'] = 'PO'
resource.source_language = 'en'
handler = self.appropriate_handler(resource, None)
self.assertIsInstance(handler, POTHandler)
handler = self.appropriate_handler(resource, 'en')
self.assertIsInstance(handler, POHandler)
handler = self.appropriate_handler(resource, 'el')
self.assertIsInstance(handler, POHandler)
def test_save(self):
resource = Mock()
resource.__dict__['i18n_type'] = 'PO'
resource.source_language = 'en'
filename = 'f.po'
handler = self.appropriate_handler(resource, None, filename=filename)
self.assertIsInstance(handler, POHandler)
handler = self.appropriate_handler(resource, 'en', filename=filename)
self.assertIsInstance(handler, POHandler)
handler = self.appropriate_handler(resource, 'el', filename=filename)
self.assertIsInstance(handler, POHandler)
filename = 'f.pot'
handler = self.appropriate_handler(resource, None, filename=filename)
self.assertIsInstance(handler, POTHandler)
handler = self.appropriate_handler(resource, 'en', filename=filename)
self.assertIsInstance(handler, POTHandler)
handler = self.appropriate_handler(resource, 'el', filename=filename)
self.assertIsInstance(handler, POTHandler)
class TestFileExtensions(unittest.TestCase):
"""Test the file extensions used."""
def setUp(self):
self.resource = Mock()
self.resource.source_language = 'en'
def test_extensions(self):
for method in registry.available_methods:
if method == 'POT':
continue
self.resource.i18n_method = method
correct_extensions = registry.extensions_for(method)
for lang in ('en', 'el'):
extension_returned = registry.file_extension_for(
self.resource, lang
)
self.assertIn(extension_returned, correct_extensions)
def test_po_extensions(self):
"""Test PO/POT extensions.
        If language is None: extension == 'pot'.
"""
self.resource.i18n_method = 'PO'
for lang in ('en', 'el', None):
extension = registry.file_extension_for(self.resource, lang)
if lang is None:
self.assertEqual(extension, registry.extensions_for('POT')[0])
else:
self.assertEqual(extension, registry.extensions_for('PO')[0])
|