prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>page.js<|end_file_name|><|fim▁begin|>import React from "react";
import Griddle from 'griddle-react';
import styles from "./style.css";
import restCalls from '../../utils/restCalls';
import { browserHistory } from 'react-router';
import AddNewCaseModal from '../../common/components/CaseModal';
import ViewCaseModal from '../../common/components/ViewCaseModal';
import TreasuryModal from '../../common/components/TreasuryModal';
import ViewTreasuryModal from '../../common/components/ViewTreasuryModal';
import NachaDrop from '../../common/components/NachaDrop';
import {Nav, NavDropdown,Navbar, NavItem, MenuItem, Button,Accordion,Panel} from 'react-bootstrap';
var HomePage = React.createClass ({
getInitialState: function(){
return {
userInfo: JSON.parse(window.localStorage.getItem('user')),
currentCaseData: "No Case Selected"
}
},
calcDayDelta: function(theCase){
let oneDay = 24*60*60*1000; // hours*minutes*seconds*milliseconds
let dateCaseOpened = new Date(theCase.dateCreated);
let numDaysOpened = Math.round(Math.abs((Date.now() - dateCaseOpened.getTime())/(oneDay)));
theCase.daysOpen = numDaysOpened;
return theCase;
},
closedCases: function(){
//set cases to empty object
this.setState({ cases: {} })
//request all the cases from DB
restCalls.getDashboardInfo()
//after those cases come back pass to allCases
.then(function(allCases){
//for each case obj in all cases calc how long it has been open
var closedCases = allCases.filter(function(aCase){
return aCase.currentStatus == "closed";
});
closedCases.map( theCase => this.calcDayDelta(theCase) );
this.setState({cases: closedCases});
}.bind(this))
},
allCases: function(){
//request all the cases from DB
restCalls.getDashboardInfo()
//after those cases come back pass to allCases
.then(function(allCases){
//for each case obj in all cases calc how long it has been open
allCases.map( theCase => this.calcDayDelta(theCase) );
this.setState({cases: allCases});
}.bind(this))
},
myCases: function(){
//set cases to empty object
this.setState({ cases: {} })
//request all the cases from DB
restCalls.getDashboardInfo()
//after those cases come back pass to allCases
.then(function(allCases){
//for each case obj in all cases calc how long it has been open
//allCases = [ {assignedto:1}, {assignedto:2} ]
var myCasesMutated = allCases.filter(function(aCase){
return aCase.userIdAssigned == "8";
});
if (myCasesMutated.length > 0)
myCasesMutated.map( theCase => this.calcDayDelta(theCase) );
this.setState({cases: myCasesMutated});
}.bind(this))
},
componentDidMount: function() {
this.allCases();
},
logOut: function(){
window.localStorage.removeItem('user');
browserHistory.push('/');
},
openGovRecModal: function () {
this.refs.govRecCaseModal.open();
},
openTresModal: function () {
this.refs.tresModal.open();
},
//wrapping caseData attributes in H3 html element
//TODO should move this to an onShow() function in Modal
parseCaseData(theCase){
return Object.keys(theCase).map( (theKey, idx) => <h3 key={idx} > {theCase[theKey]} </h3>);
},
// open modal to view all case details
rowClick: function (rowCaseData) {
//setting state allows us to update data in viewCaseModal
this.setState({caseData: rowCaseData.props.data});
this.refs.viewCaseModal.open();
},
render() {
return (
<div className={styles.content}>
<Navbar>
<Navbar.Header>
<Navbar.Brand>
<a href="#">CCMS</a>
</Navbar.Brand>
<Navbar.Toggle />
</Navbar.Header>
<Navbar.Collapse>
<Nav>
<NavItem eventKey={1} active={true} href="#">Dashboard</NavItem>
<NavDropdown eventKey={3} title="Add Case" id="basic-nav-dropdown">
<MenuItem eventKey={3.1} onClick={this.openGovRecModal}>Government Reclamation</MenuItem>
<MenuItem eventKey={3.2} onClick={this.openTresModal}>Treasury Form</MenuItem>
</NavDropdown>
<NavDropdown eventKey={5} title="Case Views" id="basic-nav-dropdown">
<MenuItem eventKey={5.1} onClick={this.allCases} >All Cases</MenuItem>
<MenuItem eventKey={5.2} onClick={this.myCases} >My Cases</MenuItem>
<MenuItem eventKey={5.2} onClick={this.closedCases} >Closed Cases</MenuItem>
</NavDropdown>
<NavItem eventKey={1} onClick={this.logOut}>Log out</NavItem>
</Nav>
</Navbar.Collapse>
</Navbar>
<NachaDrop className={styles.dropbox} refreshCases={this.allCases} />
<br/>
<h1 className={styles.welcomeText} > Cases for {this.state.userInfo.firstName} {this.state.userInfo.LastName} </h1>
<br/>
<Griddle
results={this.state.cases}
tableClassName="table" showFilter={true}
showSettings={true}
columns={["caseId","benName", "totalAmount", "sla", 'daysOpen', 'currentStatus']}
noDataMessage={"No Cases to Display. Try Refreshing the page or click Add New above."}
onRowClick={this.rowClick}
enableInfiniteScroll={true}
bodyHeight={500}
filterPlaceholderText={"Search"}
columnMetadata={meta}
initialSort={"dateCreated"}
initialSortAscending={false}
/>
{/* This is the modal that is rendered when a row is click
currentCaseData is passed as a property which the modal can render*/}
<ViewCaseModal refreshCases={this.allCases} case={this.state.caseData} ref={'viewCaseModal'} />
<AddNewCaseModal refreshCases={this.allCases} ref={'govRecCaseModal'} />
<ViewTreasuryModal case={this.state.caseData} ref={'viewTreasuryModal'} />
<TreasuryModal refreshCases={this.allCases} ref={'tresModal'} />
</div>
);
}
})
var meta = [
{
"columnName": "caseId",
"order": 1,
"locked": false,
"visible": true,
"displayName": "Case ID"
},
{
"columnName": "id",
"order": 1,
"locked": false,
"visible": false,
"displayName": "ID"
},
{
"columnName": "benName",
"order": 2,
"locked": false,
"visible": true,
"displayName": "Beneficiary Name"
},
{
"columnName": "totalAmount",
"order": 3,
"locked": false,
"visible": true,
"displayName": "Total Amount"
},
{
"columnName": "sla",
"order": 4,
"locked": false,
"visible": true,
"displayName": "SLA"
},
{
"columnName": "daysOpen",
"order": 5,
"locked": false,
"visible": true,
"displayName": "Days Open"
},
{
"columnName": "dateCreated",
"order": 5,
"locked": false,
"visible": false,
"displayName": "Date Created"
},
{
"columnName": "currentStatus",
"order": 6,
"locked": false,
"visible": true,
"displayName": "Status"
},
{
"columnName": "dateCreated",
"order": 1,
"locked": false,
"visible": true,
"displayName": "Date Created"
},
{
"columnName": "assigned",
"order": 1,
"locked": false,
"visible": true,
"displayName": "Assigned"
},
{
"columnName": "dateVerified",
"order": 1,
"locked": false,
"visible": true,
"displayName": "Date Verified"
},
{
"columnName": "userIdClosed",
"order": 1,
"locked": false,
"visible": false,
"displayName": "User Id Closed"<|fim▁hole|> "order": 1,
"locked": false,
"visible": true,
"displayName": "Watch"
},
{
"columnName": "checkNumber",
"order": 1,
"locked": false,
"visible": false,
"displayName": "Check Number"
},
{
"columnName": "locked",
"order": 1,
"locked": false,
"visible": false,
"displayName": "locked"
},
{
"columnName": "benAccountNumber",
"order": 1,
"locked": false,
"visible": true,
"displayName": "Account Number"
},
{
"columnName": "otherBenefitsComments",
"order": 1,
"locked": false,
"visible": false,
"displayName": "Benefits Comments"
},
{
"columnName": "mailedTo",
"order": 1,
"locked": false,
"visible": false,
"displayName": "Mailed to"
},
{
"columnName": "userIdVerified",
"order": 1,
"locked": false,
"visible": false,
"displayName": "User Id Verified"
},
{
"columnName": "reviewDeadline",
"order": 1,
"locked": false,
"visible": true,
"displayName": "Review Deadline"
},
{
"columnName": "userIdAssigned",
"order": 1,
"locked": false,
"visible": false,
"displayName": "User Id Assigned"
},
{
"columnName": "benSocialNumber",
"order": 1,
"locked": false,
"visible": true,
"displayName": "SSN"
},
{
"columnName": "numberPayments",
"order": 1,
"locked": false,
"visible": false,
"displayName": "Number of Payments"
},
{
"columnName": "fullRecovery",
"order": 1,
"locked": false,
"visible": false,
"displayName": "Full Recovery"
},
{
"columnName": "glCostCenter",
"order": 1,
"locked": false,
"visible": false,
"displayName": "Cost Center"
},
{
"columnName": "userIdOpened",
"order": 1,
"locked": false,
"visible": false,
"displayName": "User ID Opened"
},
{
"columnName": "mainType",
"order": 1,
"locked": false,
"visible": true,
"displayName": "Case Type"
},
{
"columnName": "benCustomerId",
"order": 1,
"locked": false,
"visible": true,
"displayName": "Beneficiary ID"
},
{
"columnName": "claimNumber",
"order": 1,
"locked": false,
"visible": true,
"displayName": "Claim Number"
},
{
"columnName": "completedDate",
"order": 1,
"locked": false,
"visible": true,
"displayName": "Date Completed"
},
{
"columnName": "ddaAccountNumber",
"order": 1,
"locked": false,
"visible": false,
"displayName": "DDA Account Number"
},
{
"columnName": "dateClosed",
"order": 1,
"locked": false,
"visible": false,
"displayName": "Date Closed"
},
{
"columnName": "subType",
"order": 1,
"locked": false,
"visible": true,
"displayName": "Sub Type"
},
{
"columnName": "dateOfDeath",
"order": 1,
"locked": false,
"visible": false,
"displayName": "Date of Death"
},
{
"columnName": "recoveryMethod",
"order": 1,
"locked": false,
"visible": true,
"displayName": "Recovery Method"
},
{
"columnName": "additionalNotes",
"order": 1,
"locked": false,
"visible": false,
"displayName": "Additional Notes"
},
{
"columnName": "otherRecoveryComments",
"order": 1,
"locked": false,
"visible": false,
"displayName": "Recovery Comments"
},
{
"columnName": "otherGovBenefits",
"order": 1,
"locked": false,
"visible": false,
"displayName": "Other Gov Benefits"
},
];
module.exports = HomePage;<|fim▁end|> | },
{
"columnName": "watchItem", |
<|file_name|>StepExporter.cpp<|end_file_name|><|fim▁begin|>/*
Open Asset Import Library (assimp)
----------------------------------------------------------------------
Copyright (c) 2006-2018, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the
following conditions are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of the assimp team, nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior
written permission of the assimp team.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@author: Richard Steffen, 2015
----------------------------------------------------------------------
*/
#ifndef ASSIMP_BUILD_NO_EXPORT
#ifndef ASSIMP_BUILD_NO_STEP_EXPORTER
#include "StepExporter.h"
#include "ConvertToLHProcess.h"
#include <assimp/Bitmap.h>
#include <assimp/BaseImporter.h>
#include <assimp/fast_atof.h>
#include <assimp/SceneCombiner.h>
#include <iostream>
#include <ctime>
#include <set>
#include <map>
#include <list>
#include <memory>
#include <assimp/Exceptional.h>
#include <assimp/DefaultIOSystem.h>
#include <assimp/IOSystem.hpp>
#include <assimp/scene.h>
#include <assimp/light.h>
//
#if _MSC_VER > 1500 || (defined __GNUC___)
# define ASSIMP_STEP_USE_UNORDERED_MULTIMAP
# else
# define step_unordered_map map
# define step_unordered_multimap multimap
#endif
#ifdef ASSIMP_STEP_USE_UNORDERED_MULTIMAP
# include <unordered_map>
# if _MSC_VER > 1600
# define step_unordered_map unordered_map
# define step_unordered_multimap unordered_multimap
# else
# define step_unordered_map tr1::unordered_map
# define step_unordered_multimap tr1::unordered_multimap
# endif
#endif
typedef std::step_unordered_map<aiVector3D*, int> VectorIndexUMap;
/* Tested with Step viewer v4 from www.ida-step.net */
using namespace Assimp;
namespace Assimp
{
// ------------------------------------------------------------------------------------------------
// Worker function for exporting a scene to Collada. Prototyped and registered in Exporter.cpp
void ExportSceneStep(const char* pFile,IOSystem* pIOSystem, const aiScene* pScene, const ExportProperties* pProperties)
{
std::string path = DefaultIOSystem::absolutePath(std::string(pFile));
std::string file = DefaultIOSystem::completeBaseName(std::string(pFile));
// create/copy Properties
ExportProperties props(*pProperties);
// invoke the exporter
StepExporter iDoTheExportThing( pScene, pIOSystem, path, file, &props);<|fim▁hole|>
// we're still here - export successfully completed. Write result to the given IOSYstem
std::unique_ptr<IOStream> outfile (pIOSystem->Open(pFile,"wt"));
if(outfile == NULL) {
throw DeadlyExportError("could not open output .stp file: " + std::string(pFile));
}
// XXX maybe use a small wrapper around IOStream that behaves like std::stringstream in order to avoid the extra copy.
outfile->Write( iDoTheExportThing.mOutput.str().c_str(), static_cast<size_t>(iDoTheExportThing.mOutput.tellp()),1);
}
} // end of namespace Assimp
namespace {
// Collect world transformations for each node
void CollectTrafos(const aiNode* node, std::map<const aiNode*, aiMatrix4x4>& trafos) {
const aiMatrix4x4& parent = node->mParent ? trafos[node->mParent] : aiMatrix4x4();
trafos[node] = parent * node->mTransformation;
for (unsigned int i = 0; i < node->mNumChildren; ++i) {
CollectTrafos(node->mChildren[i], trafos);
}
}
// Generate a flat list of the meshes (by index) assigned to each node
void CollectMeshes(const aiNode* node, std::multimap<const aiNode*, unsigned int>& meshes) {
for (unsigned int i = 0; i < node->mNumMeshes; ++i) {
meshes.insert(std::make_pair(node, node->mMeshes[i]));
}
for (unsigned int i = 0; i < node->mNumChildren; ++i) {
CollectMeshes(node->mChildren[i], meshes);
}
}
}
// ------------------------------------------------------------------------------------------------
// Constructor for a specific scene to export
StepExporter::StepExporter(const aiScene* pScene, IOSystem* pIOSystem, const std::string& path,
const std::string& file, const ExportProperties* pProperties):
mProperties(pProperties),mIOSystem(pIOSystem),mFile(file), mPath(path),
mScene(pScene), endstr(";\n") {
CollectTrafos(pScene->mRootNode, trafos);
CollectMeshes(pScene->mRootNode, meshes);
// make sure that all formatting happens using the standard, C locale and not the user's current locale
mOutput.imbue( std::locale("C") );
mOutput.precision(16);
// start writing
WriteFile();
}
// ------------------------------------------------------------------------------------------------
// Starts writing the contents
void StepExporter::WriteFile()
{
// see http://shodhganga.inflibnet.ac.in:8080/jspui/bitstream/10603/14116/11/11_chapter%203.pdf
// note, that all realnumber values must be comma separated in x files
mOutput.setf(std::ios::fixed);
// precision for double
// see http://stackoverflow.com/questions/554063/how-do-i-print-a-double-value-with-full-precision-using-cout
mOutput.precision(16);
// standard color
aiColor4D fColor;
fColor.r = 0.8f;
fColor.g = 0.8f;
fColor.b = 0.8f;
int ind = 100; // the start index to be used
int faceEntryLen = 30; // number of entries for a triangle/face
// prepare unique (count triangles and vertices)
VectorIndexUMap uniqueVerts; // use a map to reduce find complexity to log(n)
VectorIndexUMap::iterator it;
int countFace = 0;
for (unsigned int i=0; i<mScene->mNumMeshes; ++i)
{
aiMesh* mesh = mScene->mMeshes[i];
for (unsigned int j=0; j<mesh->mNumFaces; ++j)
{
aiFace* face = &(mesh->mFaces[j]);
if (face->mNumIndices == 3) countFace++;
}
for (unsigned int j=0; j<mesh->mNumVertices; ++j)
{
aiVector3D* v = &(mesh->mVertices[j]);
it =uniqueVerts.find(v);
if (it == uniqueVerts.end())
{
uniqueVerts[v] = -1; // first mark the vector as not transformed
}
}
}
static const unsigned int date_nb_chars = 20;
char date_str[date_nb_chars];
std::time_t date = std::time(NULL);
std::strftime(date_str, date_nb_chars, "%Y-%m-%dT%H:%M:%S", std::localtime(&date));
// write the header
mOutput << "ISO-10303-21" << endstr;
mOutput << "HEADER" << endstr;
mOutput << "FILE_DESCRIPTION(('STEP AP214'),'1')" << endstr;
mOutput << "FILE_NAME('" << mFile << ".stp','" << date_str << "',(' '),(' '),'Spatial InterOp 3D',' ',' ')" << endstr;
mOutput << "FILE_SCHEMA(('automotive_design'))" << endstr;
mOutput << "ENDSEC" << endstr;
// write the top of data
mOutput << "DATA" << endstr;
mOutput << "#1=MECHANICAL_DESIGN_GEOMETRIC_PRESENTATION_REPRESENTATION(' ',(";
for (int i=0; i<countFace; ++i)
{
mOutput << "#" << i*faceEntryLen + ind + 2*uniqueVerts.size();
if (i!=countFace-1) mOutput << ",";
}
mOutput << "),#6)" << endstr;
mOutput << "#2=PRODUCT_DEFINITION_CONTEXT('',#7,'design')" << endstr;
mOutput << "#3=APPLICATION_PROTOCOL_DEFINITION('INTERNATIONAL STANDARD','automotive_design',1994,#7)" << endstr;
mOutput << "#4=PRODUCT_CATEGORY_RELATIONSHIP('NONE','NONE',#8,#9)" << endstr;
mOutput << "#5=SHAPE_DEFINITION_REPRESENTATION(#10,#11)" << endstr;
mOutput << "#6= (GEOMETRIC_REPRESENTATION_CONTEXT(3)GLOBAL_UNCERTAINTY_ASSIGNED_CONTEXT((#12))GLOBAL_UNIT_ASSIGNED_CONTEXT((#13,#14,#15))REPRESENTATION_CONTEXT('NONE','WORKSPACE'))" << endstr;
mOutput << "#7=APPLICATION_CONTEXT(' ')" << endstr;
mOutput << "#8=PRODUCT_CATEGORY('part','NONE')" << endstr;
mOutput << "#9=PRODUCT_RELATED_PRODUCT_CATEGORY('detail',' ',(#17))" << endstr;
mOutput << "#10=PRODUCT_DEFINITION_SHAPE('NONE','NONE',#18)" << endstr;
mOutput << "#11=MANIFOLD_SURFACE_SHAPE_REPRESENTATION('Root',(#16,#19),#6)" << endstr;
mOutput << "#12=UNCERTAINTY_MEASURE_WITH_UNIT(LENGTH_MEASURE(1.0E-006),#13,'','')" << endstr;
mOutput << "#13=(CONVERSION_BASED_UNIT('METRE',#20)LENGTH_UNIT()NAMED_UNIT(#21))" << endstr;
mOutput << "#14=(NAMED_UNIT(#22)PLANE_ANGLE_UNIT()SI_UNIT($,.RADIAN.))" << endstr;
mOutput << "#15=(NAMED_UNIT(#22)SOLID_ANGLE_UNIT()SI_UNIT($,.STERADIAN.))" << endstr;
mOutput << "#16=SHELL_BASED_SURFACE_MODEL('Root',(#29))" << endstr;
mOutput << "#17=PRODUCT('Root','Root','Root',(#23))" << endstr;
mOutput << "#18=PRODUCT_DEFINITION('NONE','NONE',#24,#2)" << endstr;
mOutput << "#19=AXIS2_PLACEMENT_3D('',#25,#26,#27)" << endstr;
mOutput << "#20=LENGTH_MEASURE_WITH_UNIT(LENGTH_MEASURE(1.0),#28)" << endstr;
mOutput << "#21=DIMENSIONAL_EXPONENTS(1.0,0.0,0.0,0.0,0.0,0.0,0.0)" << endstr;
mOutput << "#22=DIMENSIONAL_EXPONENTS(0.0,0.0,0.0,0.0,0.0,0.0,0.0)" << endstr;
mOutput << "#23=PRODUCT_CONTEXT('',#7,'mechanical')" << endstr;
mOutput << "#24=PRODUCT_DEFINITION_FORMATION_WITH_SPECIFIED_SOURCE(' ','NONE',#17,.NOT_KNOWN.)" << endstr;
mOutput << "#25=CARTESIAN_POINT('',(0.0,0.0,0.0))" << endstr;
mOutput << "#26=DIRECTION('',(0.0,0.0,1.0))" << endstr;
mOutput << "#27=DIRECTION('',(1.0,0.0,0.0))" << endstr;
mOutput << "#28= (NAMED_UNIT(#21)LENGTH_UNIT()SI_UNIT(.MILLI.,.METRE.))" << endstr;
mOutput << "#29=CLOSED_SHELL('',(";
for (int i=0; i<countFace; ++i)
{
mOutput << "#" << i*faceEntryLen + ind + 2*uniqueVerts.size() + 8;
if (i!=countFace-1) mOutput << ",";
}
mOutput << "))" << endstr;
// write all the unique transformed CARTESIAN and VERTEX
for (MeshesByNodeMap::const_iterator it2 = meshes.begin(); it2 != meshes.end(); ++it2)
{
const aiNode& node = *(*it2).first;
unsigned int mesh_idx = (*it2).second;
const aiMesh* mesh = mScene->mMeshes[mesh_idx];
aiMatrix4x4& trafo = trafos[&node];
for (unsigned int i = 0; i < mesh->mNumVertices; ++i)
{
aiVector3D* v = &(mesh->mVertices[i]);
it = uniqueVerts.find(v);
if (it->second >=0 ) continue;
it->second = ind; // this one is new, so set the index (ind)
aiVector3D vt = trafo * (*v); // transform the coordinate
mOutput << "#" << it->second << "=CARTESIAN_POINT('',(" << vt.x << "," << vt.y << "," << vt.z << "))" << endstr;
mOutput << "#" << it->second+1 << "=VERTEX_POINT('',#" << it->second << ")" << endstr;
ind += 2;
}
}
// write the triangles
for (unsigned int i=0; i<mScene->mNumMeshes; ++i)
{
aiMesh* mesh = mScene->mMeshes[i];
for (unsigned int j=0; j<mesh->mNumFaces; ++j)
{
aiFace* face = &(mesh->mFaces[j]);
if (face->mNumIndices != 3) continue;
aiVector3D* v1 = &(mesh->mVertices[face->mIndices[0]]);
aiVector3D* v2 = &(mesh->mVertices[face->mIndices[1]]);
aiVector3D* v3 = &(mesh->mVertices[face->mIndices[2]]);
aiVector3D dv12 = *v2 - *v1;
aiVector3D dv23 = *v3 - *v2;
aiVector3D dv31 = *v1 - *v3;
aiVector3D dv13 = *v3 - *v1;
dv12.Normalize();
dv23.Normalize();
dv31.Normalize();
dv13.Normalize();
int pid1 = uniqueVerts.find(v1)->second;
int pid2 = uniqueVerts.find(v2)->second;
int pid3 = uniqueVerts.find(v3)->second;
// mean vertex color for the face if available
if (mesh->HasVertexColors(0))
{
fColor.r = 0.0;
fColor.g = 0.0;
fColor.b = 0.0;
fColor += mesh->mColors[0][face->mIndices[0]];
fColor += mesh->mColors[0][face->mIndices[1]];
fColor += mesh->mColors[0][face->mIndices[2]];
fColor /= 3.0f;
}
int sid = ind; // the sub index
mOutput << "#" << sid << "=STYLED_ITEM('',(#" << sid+1 << "),#" << sid+8 << ")" << endstr; /* the item that must be referenced in #1 */
/* This is the color information of the Triangle */
mOutput << "#" << sid+1 << "=PRESENTATION_STYLE_ASSIGNMENT((#" << sid+2 << "))" << endstr;
mOutput << "#" << sid+2 << "=SURFACE_STYLE_USAGE(.BOTH.,#" << sid+3 << ")" << endstr;
mOutput << "#" << sid+3 << "=SURFACE_SIDE_STYLE('',(#" << sid+4 << "))" << endstr;
mOutput << "#" << sid+4 << "=SURFACE_STYLE_FILL_AREA(#" << sid+5 << ")" << endstr;
mOutput << "#" << sid+5 << "=FILL_AREA_STYLE('',(#" << sid+6 << "))" << endstr;
mOutput << "#" << sid+6 << "=FILL_AREA_STYLE_COLOUR('',#" << sid+7 << ")" << endstr;
mOutput << "#" << sid+7 << "=COLOUR_RGB(''," << fColor.r << "," << fColor.g << "," << fColor.b << ")" << endstr;
/* this is the geometry */
mOutput << "#" << sid+8 << "=FACE_SURFACE('',(#" << sid+13 << "),#" << sid+9<< ",.T.)" << endstr; /* the face that must be referenced in 29 */
/* 2 directions of the plane */
mOutput << "#" << sid+9 << "=PLANE('',#" << sid+10 << ")" << endstr;
mOutput << "#" << sid+10 << "=AXIS2_PLACEMENT_3D('',#" << pid1 << ", #" << sid+11 << ",#" << sid+12 << ")" << endstr;
mOutput << "#" << sid+11 << "=DIRECTION('',(" << dv12.x << "," << dv12.y << "," << dv12.z << "))" << endstr;
mOutput << "#" << sid+12 << "=DIRECTION('',(" << dv13.x << "," << dv13.y << "," << dv13.z << "))" << endstr;
mOutput << "#" << sid+13 << "=FACE_BOUND('',#" << sid+14 << ",.T.)" << endstr;
mOutput << "#" << sid+14 << "=EDGE_LOOP('',(#" << sid+15 << ",#" << sid+16 << ",#" << sid+17 << "))" << endstr;
/* edge loop */
mOutput << "#" << sid+15 << "=ORIENTED_EDGE('',*,*,#" << sid+18 << ",.T.)" << endstr;
mOutput << "#" << sid+16 << "=ORIENTED_EDGE('',*,*,#" << sid+19 << ",.T.)" << endstr;
mOutput << "#" << sid+17 << "=ORIENTED_EDGE('',*,*,#" << sid+20 << ",.T.)" << endstr;
/* oriented edges */
mOutput << "#" << sid+18 << "=EDGE_CURVE('',#" << pid1+1 << ",#" << pid2+1 << ",#" << sid+21 << ",.F.)" << endstr;
mOutput << "#" << sid+19 << "=EDGE_CURVE('',#" << pid2+1 << ",#" << pid3+1 << ",#" << sid+22 << ",.T.)" << endstr;
mOutput << "#" << sid+20 << "=EDGE_CURVE('',#" << pid3+1 << ",#" << pid1+1 << ",#" << sid+23 << ",.T.)" << endstr;
/* 3 lines and 3 vectors for the lines for the 3 edge curves */
mOutput << "#" << sid+21 << "=LINE('',#" << pid1 << ",#" << sid+24 << ")" << endstr;
mOutput << "#" << sid+22 << "=LINE('',#" << pid2 << ",#" << sid+25 << ")" << endstr;
mOutput << "#" << sid+23 << "=LINE('',#" << pid3 << ",#" << sid+26 << ")" << endstr;
mOutput << "#" << sid+24 << "=VECTOR('',#" << sid+27 << ",1.0)" << endstr;
mOutput << "#" << sid+25 << "=VECTOR('',#" << sid+28 << ",1.0)" << endstr;
mOutput << "#" << sid+26 << "=VECTOR('',#" << sid+29 << ",1.0)" << endstr;
mOutput << "#" << sid+27 << "=DIRECTION('',(" << dv12.x << "," << dv12.y << "," << dv12.z << "))" << endstr;
mOutput << "#" << sid+28 << "=DIRECTION('',(" << dv23.x << "," << dv23.y << "," << dv23.z << "))" << endstr;
mOutput << "#" << sid+29 << "=DIRECTION('',(" << dv31.x << "," << dv31.y << "," << dv31.z << "))" << endstr;
ind += faceEntryLen; // increase counter
}
}
mOutput << "ENDSEC" << endstr; // end of data section
mOutput << "END-ISO-10303-21" << endstr; // end of file
}
#endif
#endif<|fim▁end|> | |
<|file_name|>null_object.py<|end_file_name|><|fim▁begin|>"""
Encapsulate the absence of an object by providing a substitutable
alternative that offers suitable default do nothing behavior.
"""
import abc
class AbstractObject(metaclass=abc.ABCMeta):
"""
Declare the interface for Client's collaborator.
Implement default behavior for the interface common to all classes,
as appropriate.
"""
@abc.abstractmethod
def request(self):
pass
class RealObject(AbstractObject):
"""
Define a concrete subclass of AbstractObject whose instances provide
useful behavior that Client expects.
"""
def request(self):
pass
class NullObject(AbstractObject):
"""
Provide an interface identical to AbstractObject's so that a null
object can be substituted for a real object.
Implement its interface to do nothing. What exactly it means to do
nothing depends on what sort of behavior Client is expecting.
"""<|fim▁hole|> pass<|fim▁end|> |
def request(self): |
<|file_name|>expr-if-struct.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests for if as expressions returning nominal types
#[derive(Copy)]
struct I { i: int }
fn test_rec() {
let rs = if true { I {i: 100} } else { I {i: 101} };
assert_eq!(rs.i, 100);
}
#[derive(Copy, Debug)]
enum mood { happy, sad, }
<|fim▁hole|> }
fn ne(&self, other: &mood) -> bool { !(*self).eq(other) }
}
fn test_tag() {
let rs = if true { mood::happy } else { mood::sad };
assert_eq!(rs, mood::happy);
}
pub fn main() { test_rec(); test_tag(); }<|fim▁end|> | impl PartialEq for mood {
fn eq(&self, other: &mood) -> bool {
((*self) as uint) == ((*other) as uint) |
<|file_name|>actions.test.js<|end_file_name|><|fim▁begin|>import {
defaultAction,
} from '../actions';
import {
DEFAULT_ACTION,
} from '../constants';
describe('Marginals actions', () => {
describe('Default Action', () => {
it('has a type of DEFAULT_ACTION', () => {
const expected = {<|fim▁hole|> });
});<|fim▁end|> | type: DEFAULT_ACTION,
};
expect(defaultAction()).toEqual(expected);
}); |
<|file_name|>installment.py<|end_file_name|><|fim▁begin|>#!/usr/local/bin/python
# -*- coding: utf-8 -*-
import re
from datetime import datetime, date
from decimal import Decimal
from translations import _t, _a
from model import Model
from lib.query_builder import Query
from lib.money import Money
import payment
import membership
import package
import settings
class Installment(Model):<|fim▁hole|> fields_for_save = ['year','month','membership_id','amount', 'status']
default_order = 'year ASC, month ASC'
def __init__(self, data = {}):
self._year = datetime.today().year
self.month = 0
self.membership_id = None
self._membership = None
self._amount = 0
self._payments = None
self._status = 'waiting'
self.ignore_recharge = False
self.ignore_second_recharge = False
Model.__init__(self, data)
@property
def year(self):
return self._year
@year.setter
def year(self, value):
try:
self._year = int(value)
except:
self._year = 0
def to_label(self):
return self.month_name() + " " + str(self.year)
@property
def amount(self):
return self._amount/100
@amount.setter
def amount(self,value):
try:
v = int(Decimal(value)*100)
except:
v = 0
self._amount = v
def description(self):
return self.membership.klass_or_package.name + ' ' + self.month_name() + ' ' + str(self.year)
def paid(self):
return sum(map(lambda p: p.amount, self.payments),0)
def is_paid(self):
return self._status != 'waiting'
def total(self, ignore_recharge = None, ignore_second_recharge = None):
if ignore_recharge is not None: self.ignore_recharge = ignore_recharge
if ignore_second_recharge is not None: self.ignore_second_recharge = ignore_second_recharge
return self.amount+self.get_recharge()
def get_recharge(self, after_day = None, recharge_value = None, second_recharge_value = None):
sets = settings.Settings.get_settings()
if after_day is None: after_day = sets.recharge_after
if recharge_value is None: recharge_value = sets.recharge_value
if second_recharge_value is None: second_recharge_value = sets.second_recharge_value
recharge = 0
sets = settings.Settings.get_settings()
today = self.__class__._today().date()
beginning_of_month = date(today.year, today.month, 1)
if self._status != 'paid':
rv = ''
if second_recharge_value != '' and self.date() < beginning_of_month and not self.ignore_second_recharge:
rv = second_recharge_value
elif recharge_value != '' and self.date(after_day) < today and not self.ignore_recharge:
rv = recharge_value
if rv != '':
if re.match('^\d+%$',rv):
recharge = self.amount*(int(rv[0:-1]))/100
elif re.match('^\d+$',rv):
recharge = int(rv)
return recharge
def date(self, after_day = None):
if after_day is None: after_day = settings.Settings.get_settings().recharge_after
return datetime.strptime(str(self.year)+"-"+str(self.month+1)+"-"+str(after_day),'%Y-%m-%d').date()
def detailed_total(self):
if self._status != 'paid_with_interests':
recharge = self.get_recharge()
recharge = '(+'+str(recharge)+')' if recharge > 0 else ''
else:
recharge = '(+'+str(self.paid() - self.amount)+')'
return '$'+str(self.amount)+recharge
def detailed_to_pay(self):
return '$'+str(self.to_pay())
def to_pay(self, ignore_recharge = None, ignore_second_recharge = None):
if ignore_recharge is not None: self.ignore_recharge = ignore_recharge
if ignore_second_recharge is not None: self.ignore_second_recharge = ignore_second_recharge
return self.total()-self.paid()
def month_name(self):
return _t('months')[self.month]
@property
def status(self):
return _a(self.cls_name(), self._status)
@status.setter
def status(self, value):
self._status = value
@property
def membership(self):
if self.membership_id and self._membership is None:
self._membership = membership.Membership.find(self.membership_id)
return self._membership
@membership.setter
def membership(self, value):
self.membership_id = None if value is None else value.id
self._membership = value
@property
def payments(self):
if self._payments is None: self._payments = payment.Payment.for_installment(self.id).do_get()
return self._payments
def to_db(self):
return {'year': self.year, 'month': self.month, 'membership_id': self.membership_id, 'amount': self.amount, 'status': self._status}
def _is_valid(self):
self.validate_numericallity_of('month', great_than_or_equal = 0, less_than_or_equal = 11)
self.validate_numericallity_of('amount', great_than_or_equal = 0, only_integer = False)
def add_payment(self, data = None):
if data is None: data = {}
if 'ignore_recharge' in data: self.ignore_recharge = data['ignore_recharge']
if 'amount' not in data: data['amount'] = self.to_pay()
amount = Money(data['amount'])
if amount <= self.to_pay():
data['installment_id'] = self.id
p = payment.Payment(data)
p.user = self.get_student()
if p.save():
self.update_status()
return p
else:
return p.full_errors()
else:
return "No se puede agregar un pago con mayor valor que el resto a pagar. Saldo: " + str(self.to_pay()) + ", Ingresado: " + str(amount)
def update_status(self):
self._status = 'waiting'
self._payments = None
if int(self.to_pay()) == 0:
if self.get_recharge() > 0 and self.ignore_recharge is False:
self._status = 'paid_with_interests'
else:
self._status = 'paid'
self.save()
def get_student_id(self):
s = self.get_student()
return s.id if s else None
def get_student(self):
return self.membership.student
def payments_details(self):
return "\n".join(map(lambda p: p.to_s(), self.payments))
def build_payment(self, data = {}):
p = payment.Payment(data)
p.installment = self
self.payments.append(p)
return p
@classmethod
def for_membership(cls,membership_id):
return cls.where('membership_id', membership_id)
def before_delete(self):
for p in self.payments:
p.description = p.description
p.installment = None
p.save(validate=False)
return True
@classmethod
def for_klass(cls, klass, q = None):
if q is None: q = Query(cls)
where = 'memberships.for_id = :klass_id AND memberships.for_type = "Klass"'
args = {'klass_id': klass.id}
packages = package.Package.with_klass(klass)
if packages.anything():
p_ids = ','.join(map(lambda p: str(p.id), packages))
where = '('+where+') OR (memberships.for_id IN ({0}) AND memberships.for_type = "Package")'.format(p_ids)
return q.set_join('LEFT JOIN memberships ON memberships.id = installments.membership_id').where(where,args)
@classmethod
def only_active_users(cls, q = None):
if q is None: q = Query(cls)
return q.set_join('LEFT JOIN memberships ON memberships.id = installments.membership_id LEFT JOIN users ON memberships.student_id = users.id').where('users.inactive = 0')
@classmethod
def overdues(cls, recharge_after = None, q = None):
if q is None: q = Query(cls)
today = cls._today()
if recharge_after is None: recharge_after = settings.Settings.get_settings().recharge_after
month = today.month-1
year = today.year
if today.day <= recharge_after: month = month-1
if month == -1:
month = 11
year = year-1
return q.where('status = "waiting" AND ((year = :year AND month <= :month) OR year < :year)', {'year': year, 'month': month})
@classmethod
def to_pay_for(cls,user):
today = cls._today()
w = 'status = "waiting" AND (memberships.student_id = :student_id OR users.family = :family)'
args = {'student_id': user.id, 'family': user.family}
return cls.where(w,args).set_join('LEFT JOIN memberships ON memberships.id = installments.membership_id LEFT JOIN users ON memberships.student_id = users.id').order_by('year ASC, month ASC')
@classmethod
def _today(cls):
return datetime.today()<|fim▁end|> | table = 'installments' |
<|file_name|>BuildFacilitiesState.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright 2010-2015 OpenXcom Developers.
*
* This file is part of OpenXcom.
*
* OpenXcom is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* OpenXcom is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenXcom. If not, see <http://www.gnu.org/licenses/>.
*/
#include "BuildFacilitiesState.h"
#include "../Engine/Game.h"
#include "../Mod/Mod.h"
#include "../Engine/LocalizedText.h"
#include "../Engine/Options.h"
#include "../Interface/TextButton.h"
#include "../Interface/Window.h"
#include "../Interface/Text.h"
#include "../Interface/TextList.h"
#include "../Mod/RuleBaseFacility.h"
<|fim▁hole|>
namespace OpenXcom
{
/**
* Initializes all the elements in the Build Facilities window.
* @param game Pointer to the core game.
* @param base Pointer to the base to get info from.
* @param state Pointer to the base state to refresh.
*/
BuildFacilitiesState::BuildFacilitiesState(Base *base, State *state) : _base(base), _state(state)
{
_screen = false;
// Create objects
_window = new Window(this, 128, 160, 192, 40, POPUP_VERTICAL);
_btnOk = new TextButton(112, 16, 200, 176);
_lstFacilities = new TextList(104, 104, 200, 64);
_txtTitle = new Text(118, 17, 197, 48);
// Set palette
setInterface("selectFacility");
add(_window, "window", "selectFacility");
add(_btnOk, "button", "selectFacility");
add(_txtTitle, "text", "selectFacility");
add(_lstFacilities, "list", "selectFacility");
centerAllSurfaces();
// Set up objects
_window->setBackground(_game->getMod()->getSurface("BACK05.SCR"));
_btnOk->setText(tr("STR_OK"));
_btnOk->onMouseClick((ActionHandler)&BuildFacilitiesState::btnOkClick);
_btnOk->onKeyboardPress((ActionHandler)&BuildFacilitiesState::btnOkClick, Options::keyCancel);
_txtTitle->setBig();
_txtTitle->setAlign(ALIGN_CENTER);
_txtTitle->setText(tr("STR_INSTALLATION"));
_lstFacilities->setColumns(1, 104);
_lstFacilities->setSelectable(true);
_lstFacilities->setBackground(_window);
_lstFacilities->setMargin(2);
_lstFacilities->setWordWrap(true);
_lstFacilities->setScrolling(true, 0);
_lstFacilities->onMouseClick((ActionHandler)&BuildFacilitiesState::lstFacilitiesClick);
PopulateBuildList();
}
/**
*
*/
BuildFacilitiesState::~BuildFacilitiesState()
{
}
/**
* Populates the build list from the current "available" facilities.
*/
void BuildFacilitiesState::PopulateBuildList()
{
const std::vector<std::string> &facilities = _game->getMod()->getBaseFacilitiesList();
for (std::vector<std::string>::const_iterator i = facilities.begin(); i != facilities.end(); ++i)
{
RuleBaseFacility *rule = _game->getMod()->getBaseFacility(*i);
if (_game->getSavedGame()->isResearched(rule->getRequirements()) && !rule->isLift())
_facilities.push_back(rule);
}
for (std::vector<RuleBaseFacility*>::iterator i = _facilities.begin(); i != _facilities.end(); ++i)
{
_lstFacilities->addRow(1, tr((*i)->getType()).c_str());
}
}
/**
* The player can change the selected base
* or change info on other screens.
*/
void BuildFacilitiesState::init()
{
_state->init();
State::init();
}
/**
* Returns to the previous screen.
* @param action Pointer to an action.
*/
void BuildFacilitiesState::btnOkClick(Action *)
{
_game->popState();
}
/**
* Places the selected facility.
* @param action Pointer to an action.
*/
void BuildFacilitiesState::lstFacilitiesClick(Action *)
{
_game->pushState(new PlaceFacilityState(_base, _facilities[_lstFacilities->getSelectedRow()]));
}
}<|fim▁end|> | #include "../Savegame/SavedGame.h"
#include "PlaceFacilityState.h"
|
<|file_name|>application_tokens.d.ts<|end_file_name|><|fim▁begin|>import { OpaqueToken, Provider } from 'angular2/src/core/di';
/**
* An {@link angular2/di/OpaqueToken} representing the application root type in the {@link
* Injector}.
*
* ```
* @Component(...)
* class MyApp {
* ...
* }
*
* bootstrap(MyApp).then((appRef:ApplicationRef) {
* expect(appRef.injector.get(appComponentTypeToken)).toEqual(MyApp);
* });
*
* ```
*/
export declare const APP_COMPONENT: OpaqueToken;
/**
* A DI Token representing a unique string id assigned to the application by Angular and used
* primarily for prefixing application attributes and CSS styles when<|fim▁hole|> * {@link ViewEncapsulation#Emulated} is being used.
*
* If you need to avoid randomly generated value to be used as an application id, you can provide
* a custom value via a DI provider <!-- TODO: provider --> configuring the root {@link Injector}
* using this token.
*/
export declare const APP_ID: OpaqueToken;
/**
* Bindings that will generate a random APP_ID_TOKEN.
*/
export declare const APP_ID_RANDOM_PROVIDER: Provider;<|fim▁end|> | |
<|file_name|>test_ui.py<|end_file_name|><|fim▁begin|># License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).<|fim▁hole|>
class UICase(HttpCase):
def test_ui_website(self):
"""Test frontend tour."""
tour = "website_sale_product_brand"
self.phantom_js(
url_path="/shop",
code="odoo.__DEBUG__.services['web_tour.tour']"
".run('%s')" % tour,
ready="odoo.__DEBUG__.services['web_tour.tour']"
".tours.%s.ready" % tour)<|fim▁end|> |
from odoo.tests.common import HttpCase |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
var express = require('express');
var passport = require('passport');
var config = require('../config/environment');
var User = require('../api/user/user.model');
// Passport Configuration<|fim▁hole|>require('./twitter/passport').setup(User, config);
require('./basic/passport').setup(User, config);
var router = express.Router();
router.use('/local', require('./local'));
router.use('/facebook', require('./facebook'));
router.use('/twitter', require('./twitter'));
router.use('/oauth', require('./basic'));
module.exports = router;<|fim▁end|> | require('./local/passport').setup(User, config);
require('./facebook/passport').setup(User, config); |
<|file_name|>PorterStemmer.java<|end_file_name|><|fim▁begin|>/**
* Licensing arrangement (from website FAQ):
*
* The software is completely free for any purpose, unless notes at the
* head of the program text indicates otherwise (which is rare). In any
* case, the notes about licensing are never more restrictive than the
* BSD License.
*
*/
package com.novartis.pcs.ontology.service.mapper;
/*
Porter stemmer in Java. The original paper is in
Porter, 1980, An algorithm for suffix stripping, Program, Vol. 14,
no. 3, pp 130-137,
See also http://www.tartarus.org/~martin/PorterStemmer/index.html
Bug 1 (reported by Gonzalo Parra 16/10/99) fixed as marked below.
Tthe words 'aed', 'eed', 'oed' leave k at 'a' for step 3, and b[k-1]
is then out outside the bounds of b.
Similarly,
Bug 2 (reported by Steve Dyrdahl 22/2/00) fixed as marked below.
'ion' by itself leaves j = -1 in the test for 'ion' in step 5, and
b[j] is then outside the bounds of b.
Release 3.
[ This version is derived from Release 3, modified by Brian Goetz to
optimize for fewer object creations. ]
*/
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
*
* Stemmer, implementing the Porter Stemming Algorithm
*
* The Stemmer class transforms a word into its root form. The input
* word can be provided a character at time (by calling add()), or at once
* by calling one of the various stem(something) methods.
*/
class PorterStemmer
{
private char[] b;
private int i, /* offset into b */
j, k, k0;
private boolean dirty = false;
private static final int INC = 50; /* unit of size whereby b is increased */
private static final int EXTRA = 1;
public PorterStemmer() {
b = new char[INC];
i = 0;
}
/**
* reset() resets the stemmer so it can stem another word. If you invoke
* the stemmer by calling add(char) and then stem(), you must call reset()
* before starting another word.
*/
public void reset() { i = 0; dirty = false; }
/**
* Add a character to the word being stemmed. When you are finished
* adding characters, you can call stem(void) to process the word.
*/
public void add(char ch) {
if (b.length <= i + EXTRA) {
char[] new_b = new char[b.length+INC];
System.arraycopy(b, 0, new_b, 0, b.length);
b = new_b;
}
b[i++] = ch;
}
/**
* After a word has been stemmed, it can be retrieved by toString(),
* or a reference to the internal buffer can be retrieved by getResultBuffer
* and getResultLength (which is generally more efficient.)
*/
@Override
public String toString() { return new String(b,0,i); }
/**
* Returns the length of the word resulting from the stemming process.
*/
public int getResultLength() { return i; }
/**
* Returns a reference to a character buffer containing the results of
* the stemming process. You also need to consult getResultLength()
* to determine the length of the result.
*/
public char[] getResultBuffer() { return b; }
/* cons(i) is true <=> b[i] is a consonant. */
private final boolean cons(int i) {
switch (b[i]) {
case 'a': case 'e': case 'i': case 'o': case 'u':
return false;
case 'y':
return (i==k0) ? true : !cons(i-1);
default:
return true;
}
}
/* m() measures the number of consonant sequences between k0 and j. if c is
a consonant sequence and v a vowel sequence, and <..> indicates arbitrary
presence,
<c><v> gives 0
<c>vc<v> gives 1
<c>vcvc<v> gives 2
<c>vcvcvc<v> gives 3
....
*/
private final int m() {
int n = 0;
int i = k0;
while(true) {
if (i > j)
return n;
if (! cons(i))
break;
i++;
}
i++;
while(true) {
while(true) {
if (i > j)
return n;
if (cons(i))
break;
i++;
}
i++;
n++;
while(true) {
if (i > j)
return n;
if (! cons(i))
break;
i++;
}
i++;
}
}
/* vowelinstem() is true <=> k0,...j contains a vowel */
private final boolean vowelinstem() {
int i;
for (i = k0; i <= j; i++)
if (! cons(i))
return true;
return false;
}
/* doublec(j) is true <=> j,(j-1) contain a double consonant. */
private final boolean doublec(int j) {
if (j < k0+1)
return false;
if (b[j] != b[j-1])
return false;
return cons(j);
}
/* cvc(i) is true <=> i-2,i-1,i has the form consonant - vowel - consonant
and also if the second c is not w,x or y. this is used when trying to
restore an e at the end of a short word. e.g.
cav(e), lov(e), hop(e), crim(e), but
snow, box, tray.
*/
private final boolean cvc(int i) {
if (i < k0+2 || !cons(i) || cons(i-1) || !cons(i-2))
return false;
else {
int ch = b[i];
if (ch == 'w' || ch == 'x' || ch == 'y') return false;
}
return true;
}
private final boolean ends(String s) {
int l = s.length();
int o = k-l+1;
if (o < k0)
return false;
for (int i = 0; i < l; i++)
if (b[o+i] != s.charAt(i))
return false;
j = k-l;
return true;
}
/* setto(s) sets (j+1),...k to the characters in the string s, readjusting
k. */
void setto(String s) {
int l = s.length();
int o = j+1;
for (int i = 0; i < l; i++)
b[o+i] = s.charAt(i);
k = j+l;
dirty = true;
}
/* r(s) is used further down. */
void r(String s) { if (m() > 0) setto(s); }
/* step1() gets rid of plurals and -ed or -ing. e.g.
caresses -> caress
ponies -> poni
ties -> ti
caress -> caress
cats -> cat
feed -> feed
agreed -> agree
disabled -> disable
matting -> mat
mating -> mate
meeting -> meet
milling -> mill
messing -> mess
meetings -> meet
*/
private final void step1() {
if (b[k] == 's') {
if (ends("sses")) k -= 2;
else if (ends("ies")) setto("i");
else if (b[k-1] != 's') k--;
}
if (ends("eed")) {
if (m() > 0)
k--;
}
else if ((ends("ed") || ends("ing")) && vowelinstem()) {
k = j;
if (ends("at")) setto("ate");
else if (ends("bl")) setto("ble");
else if (ends("iz")) setto("ize");
else if (doublec(k)) {
int ch = b[k--];
if (ch == 'l' || ch == 's' || ch == 'z')
k++;
}
else if (m() == 1 && cvc(k))
setto("e");
}
}
/* step2() turns terminal y to i when there is another vowel in the stem. */
private final void step2() {
if (ends("y") && vowelinstem()) {
b[k] = 'i';
dirty = true;
}
}
/* step3() maps double suffices to single ones. so -ization ( = -ize plus
-ation) maps to -ize etc. note that the string before the suffix must give
m() > 0. */
private final void step3() {
if (k == k0) return; /* For Bug 1 */
switch (b[k-1]) {
case 'a':
if (ends("ational")) { r("ate"); break; }
if (ends("tional")) { r("tion"); break; }
break;
case 'c':
if (ends("enci")) { r("ence"); break; }
if (ends("anci")) { r("ance"); break; }
break;
case 'e':
if (ends("izer")) { r("ize"); break; }
break;
case 'l':
if (ends("bli")) { r("ble"); break; }
if (ends("alli")) { r("al"); break; }
if (ends("entli")) { r("ent"); break; }
if (ends("eli")) { r("e"); break; }
if (ends("ousli")) { r("ous"); break; }
break;
case 'o':
if (ends("ization")) { r("ize"); break; }
if (ends("ation")) { r("ate"); break; }
if (ends("ator")) { r("ate"); break; }
break;
case 's':
if (ends("alism")) { r("al"); break; }
if (ends("iveness")) { r("ive"); break; }
if (ends("fulness")) { r("ful"); break; }
if (ends("ousness")) { r("ous"); break; }
break;
case 't':
if (ends("aliti")) { r("al"); break; }
if (ends("iviti")) { r("ive"); break; }
if (ends("biliti")) { r("ble"); break; }
break;
case 'g':
if (ends("logi")) { r("log"); break; }
}
}
/* step4() deals with -ic-, -full, -ness etc. similar strategy to step3. */
private final void step4() {
switch (b[k]) {
case 'e':
if (ends("icate")) { r("ic"); break; }
if (ends("ative")) { r(""); break; }
if (ends("alize")) { r("al"); break; }
break;
case 'i':
if (ends("iciti")) { r("ic"); break; }
break;
case 'l':
if (ends("ical")) { r("ic"); break; }
if (ends("ful")) { r(""); break; }
break;
case 's':
if (ends("ness")) { r(""); break; }
break;
}
}
/* step5() takes off -ant, -ence etc., in context <c>vcvc<v>. */
private final void step5() {
if (k == k0) return; /* for Bug 1 */
switch (b[k-1]) {
case 'a':
if (ends("al")) break;
return;
case 'c':
if (ends("ance")) break;
if (ends("ence")) break;
return;
case 'e':
if (ends("er")) break; return;
case 'i':
if (ends("ic")) break; return;
case 'l':
if (ends("able")) break;
if (ends("ible")) break; return;
case 'n':
if (ends("ant")) break;
if (ends("ement")) break;
if (ends("ment")) break;
/* element etc. not stripped before the m */
if (ends("ent")) break;
return;
case 'o':
if (ends("ion") && j >= 0 && (b[j] == 's' || b[j] == 't')) break;
/* j >= 0 fixes Bug 2 */
if (ends("ou")) break;
return;
/* takes care of -ous */
case 's':
if (ends("ism")) break;
return;
case 't':
if (ends("ate")) break;
if (ends("iti")) break;
return;
case 'u':
if (ends("ous")) break;
return;
case 'v':
if (ends("ive")) break;
return;
case 'z':
if (ends("ize")) break;
return;
default:
return;
}
if (m() > 1)
k = j;
}
/* step6() removes a final -e if m() > 1. */
private final void step6() {
j = k;
if (b[k] == 'e') {
int a = m();
if (a > 1 || a == 1 && !cvc(k-1))
k--;
}
if (b[k] == 'l' && doublec(k) && m() > 1)
k--;
}
/**
* Stem a word provided as a String. Returns the result as a String.
*/
public String stem(String s) {
if (stem(s.toCharArray(), s.length()))
return toString();
else
return s;
}
/** Stem a word contained in a char[]. Returns true if the stemming process
* resulted in a word different from the input. You can retrieve the
* result with getResultLength()/getResultBuffer() or toString().
*/
public boolean stem(char[] word) {
return stem(word, word.length);
}
/** Stem a word contained in a portion of a char[] array. Returns
* true if the stemming process resulted in a word different from
* the input. You can retrieve the result with
* getResultLength()/getResultBuffer() or toString().
*/
public boolean stem(char[] wordBuffer, int offset, int wordLen) {
reset();
if (b.length < wordLen) {
char[] new_b = new char[wordLen + EXTRA];
b = new_b;
}
System.arraycopy(wordBuffer, offset, b, 0, wordLen);
i = wordLen;
return stem(0);
}
/** Stem a word contained in a leading portion of a char[] array.
* Returns true if the stemming process resulted in a word different
* from the input. You can retrieve the result with
* getResultLength()/getResultBuffer() or toString().
*/
public boolean stem(char[] word, int wordLen) {
return stem(word, 0, wordLen);
}
/** Stem the word placed into the Stemmer buffer through calls to add().
* Returns true if the stemming process resulted in a word different
* from the input. You can retrieve the result with
* getResultLength()/getResultBuffer() or toString().
*/
public boolean stem() {
return stem(0);
}
public boolean stem(int i0) {
k = i - 1;
k0 = i0;
if (k > k0+1) {
step1(); step2(); step3(); step4(); step5(); step6();
}
// Also, a word is considered dirty if we lopped off letters
// Thanks to Ifigenia Vairelles for pointing this out.
if (i != k+1)
dirty = true;
i = k+1;
return dirty;
}
/** Test program for demonstrating the Stemmer. It reads a file and
* stems each word, writing the result to standard out.
* Usage: Stemmer file-name
*/<|fim▁hole|> for (int i = 0; i < args.length; i++) {
try {
InputStream in = new FileInputStream(args[i]);
byte[] buffer = new byte[1024];
int bufferLen, offset, ch;
bufferLen = in.read(buffer);
offset = 0;
s.reset();
while(true) {
if (offset < bufferLen)
ch = buffer[offset++];
else {
bufferLen = in.read(buffer);
offset = 0;
if (bufferLen < 0)
ch = -1;
else
ch = buffer[offset++];
}
if (Character.isLetter((char) ch)) {
s.add(Character.toLowerCase((char) ch));
}
else {
s.stem();
System.out.print(s.toString());
s.reset();
if (ch < 0)
break;
else {
System.out.print((char) ch);
}
}
}
in.close();
}
catch (IOException e) {
System.out.println("error reading " + args[i]);
}
}
}
}<|fim▁end|> | public static void main(String[] args) {
PorterStemmer s = new PorterStemmer();
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.<|fim▁hole|># The full license is in the file COPYING.txt, distributed with this software.
# -----------------------------------------------------------------------------<|fim▁end|> | #
# Distributed under the terms of the Modified BSD License.
# |
<|file_name|>if_.rs<|end_file_name|><|fim▁begin|>//! Network interface name resolution.
//!
//! Uses Linux and/or POSIX functions to resolve interface names like "eth0"
//! or "socan1" into device numbers.
use libc::c_uint;
use crate::{Result, Error, NixPath};
/// Resolve an interface into a interface number.
pub fn if_nametoindex<P: ?Sized + NixPath>(name: &P) -> Result<c_uint> {
let if_index = name.with_nix_path(|name| unsafe { libc::if_nametoindex(name.as_ptr()) })?;
if if_index == 0 {
Err(Error::last())
} else {
Ok(if_index)
}
}
libc_bitflags!(
/// Standard interface flags, used by `getifaddrs`
pub struct InterfaceFlags: libc::c_int {
/// Interface is running. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
IFF_UP;
/// Valid broadcast address set. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
IFF_BROADCAST;
/// Internal debugging flag. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
IFF_DEBUG;
/// Interface is a loopback interface. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
IFF_LOOPBACK;
/// Interface is a point-to-point link. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
IFF_POINTOPOINT;
/// Avoid use of trailers. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
#[cfg(any(target_os = "android",
target_os = "fuchsia",
target_os = "ios",
target_os = "linux",
target_os = "macos",
target_os = "netbsd",
target_os = "solaris"))]
IFF_NOTRAILERS;
/// Interface manages own routes.
#[cfg(any(target_os = "dragonfly"))]
IFF_SMART;
/// Resources allocated. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
#[cfg(any(target_os = "android",
target_os = "dragonfly",
target_os = "freebsd",
target_os = "fuchsia",
target_os = "ios",
target_os = "linux",
target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
target_os = "solaris"))]
IFF_RUNNING;
/// No arp protocol, L2 destination address not set. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
IFF_NOARP;
/// Interface is in promiscuous mode. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
IFF_PROMISC;
/// Receive all multicast packets. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
IFF_ALLMULTI;
/// Master of a load balancing bundle. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
#[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))]
IFF_MASTER;
/// transmission in progress, tx hardware queue is full
#[cfg(any(target_os = "freebsd",
target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
target_os = "ios"))]
IFF_OACTIVE;
/// Protocol code on board.
#[cfg(target_os = "solaris")]
IFF_INTELLIGENT;
/// Slave of a load balancing bundle. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
#[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))]
IFF_SLAVE;
/// Can't hear own transmissions.
#[cfg(any(target_os = "dragonfly",
target_os = "freebsd",
target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
target_os = "osx"))]
IFF_SIMPLEX;
/// Supports multicast. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
IFF_MULTICAST;
/// Per link layer defined bit.
#[cfg(any(target_os = "dragonfly",
target_os = "freebsd",
target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
target_os = "ios"))]
IFF_LINK0;
/// Multicast using broadcast.
#[cfg(any(target_os = "solaris"))]
IFF_MULTI_BCAST;
/// Is able to select media type via ifmap. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
#[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))]
IFF_PORTSEL;
/// Per link layer defined bit.
#[cfg(any(target_os = "dragonfly",
target_os = "freebsd",
target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
target_os = "ios"))]
IFF_LINK1;
/// Non-unique address.
#[cfg(any(target_os = "solaris"))]
IFF_UNNUMBERED;
/// Auto media selection active. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
#[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))]
IFF_AUTOMEDIA;
/// Per link layer defined bit.
#[cfg(any(target_os = "dragonfly",
target_os = "freebsd",
target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
target_os = "ios"))]
IFF_LINK2;
/// Use alternate physical connection.
#[cfg(any(target_os = "dragonfly",
target_os = "freebsd",
target_os = "macos",
target_os = "ios"))]
IFF_ALTPHYS;
/// DHCP controlls interface.
#[cfg(any(target_os = "solaris"))]
IFF_DHCPRUNNING;
/// The addresses are lost when the interface goes down. (see
/// [`netdevice(7)`](http://man7.org/linux/man-pages/man7/netdevice.7.html))
#[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))]
IFF_DYNAMIC;
/// Do not advertise.
#[cfg(any(target_os = "solaris"))]
IFF_PRIVATE;
/// Driver signals L1 up. Volatile.
#[cfg(any(target_os = "fuchsia", target_os = "linux"))]
IFF_LOWER_UP;
/// Interface is in polling mode.
#[cfg(any(target_os = "dragonfly"))]
IFF_POLLING_COMPAT;
/// Unconfigurable using ioctl(2).
#[cfg(any(target_os = "freebsd"))]
IFF_CANTCONFIG;
/// Do not transmit packets.
#[cfg(any(target_os = "solaris"))]
IFF_NOXMIT;
/// Driver signals dormant. Volatile.
#[cfg(any(target_os = "fuchsia", target_os = "linux"))]
IFF_DORMANT;
/// User-requested promisc mode.
#[cfg(any(target_os = "dragonfly", target_os = "freebsd"))]
IFF_PPROMISC;
/// Just on-link subnet.
#[cfg(any(target_os = "solaris"))]
IFF_NOLOCAL;
/// Echo sent packets. Volatile.
#[cfg(any(target_os = "fuchsia", target_os = "linux"))]
IFF_ECHO;
/// User-requested monitor mode.
#[cfg(any(target_os = "dragonfly", target_os = "freebsd"))]
IFF_MONITOR;
/// Address is deprecated.
#[cfg(any(target_os = "solaris"))]
IFF_DEPRECATED;
/// Static ARP.
#[cfg(any(target_os = "dragonfly", target_os = "freebsd"))]
IFF_STATICARP;
/// Address from stateless addrconf.
#[cfg(any(target_os = "solaris"))]
IFF_ADDRCONF;
/// Interface is in polling mode.
#[cfg(any(target_os = "dragonfly"))]
IFF_NPOLLING;
/// Router on interface.
#[cfg(any(target_os = "solaris"))]
IFF_ROUTER;<|fim▁hole|> #[cfg(any(target_os = "dragonfly"))]
IFF_IDIRECT;
/// Interface is winding down
#[cfg(any(target_os = "freebsd"))]
IFF_DYING;
/// No NUD on interface.
#[cfg(any(target_os = "solaris"))]
IFF_NONUD;
/// Interface is being renamed
#[cfg(any(target_os = "freebsd"))]
IFF_RENAMING;
/// Anycast address.
#[cfg(any(target_os = "solaris"))]
IFF_ANYCAST;
/// Don't exchange routing info.
#[cfg(any(target_os = "solaris"))]
IFF_NORTEXCH;
/// Do not provide packet information
#[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))]
IFF_NO_PI as libc::c_int;
/// TUN device (no Ethernet headers)
#[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))]
IFF_TUN as libc::c_int;
/// TAP device
#[cfg(any(target_os = "android", target_os = "fuchsia", target_os = "linux"))]
IFF_TAP as libc::c_int;
/// IPv4 interface.
#[cfg(any(target_os = "solaris"))]
IFF_IPV4;
/// IPv6 interface.
#[cfg(any(target_os = "solaris"))]
IFF_IPV6;
/// in.mpathd test address
#[cfg(any(target_os = "solaris"))]
IFF_NOFAILOVER;
/// Interface has failed
#[cfg(any(target_os = "solaris"))]
IFF_FAILED;
/// Interface is a hot-spare
#[cfg(any(target_os = "solaris"))]
IFF_STANDBY;
/// Functioning but not used
#[cfg(any(target_os = "solaris"))]
IFF_INACTIVE;
/// Interface is offline
#[cfg(any(target_os = "solaris"))]
IFF_OFFLINE;
#[cfg(any(target_os = "solaris"))]
IFF_COS_ENABLED;
/// Prefer as source addr.
#[cfg(any(target_os = "solaris"))]
IFF_PREFERRED;
/// RFC3041
#[cfg(any(target_os = "solaris"))]
IFF_TEMPORARY;
/// MTU set with SIOCSLIFMTU
#[cfg(any(target_os = "solaris"))]
IFF_FIXEDMTU;
/// Cannot send / receive packets
#[cfg(any(target_os = "solaris"))]
IFF_VIRTUAL;
/// Local address in use
#[cfg(any(target_os = "solaris"))]
IFF_DUPLICATE;
/// IPMP IP interface
#[cfg(any(target_os = "solaris"))]
IFF_IPMP;
}
);<|fim▁end|> | /// Interface is in polling mode. |
<|file_name|>ddraw.rs<|end_file_name|><|fim▁begin|>// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// All files in the project carrying such notice may not be copied, modified, or distributed
// except according to those terms.
DEFINE_GUID!{CLSID_DirectDraw,
0xd7b70ee0, 0x4340, 0x11cf, 0xb0, 0x63, 0x00, 0x20, 0xaf, 0xc2, 0xcd, 0x35}
DEFINE_GUID!{CLSID_DirectDraw7,
0x3c305196, 0x50db, 0x11d3, 0x9c, 0xfe, 0x00, 0xc0, 0x4f, 0xd9, 0x30, 0xc5}
DEFINE_GUID!{CLSID_DirectDrawClipper,
0x593817a0, 0x7db3, 0x11cf, 0xa2, 0xde, 0x00, 0xaa, 0x00, 0xb9, 0x33, 0x56}
DEFINE_GUID!{IID_IDirectDraw,
0x6c14db80, 0xa733, 0x11ce, 0xa5, 0x21, 0x00, 0x20, 0xaf, 0x0b, 0xe5, 0x60}
DEFINE_GUID!{IID_IDirectDraw2,
0xb3a6f3e0, 0x2b43, 0x11cf, 0xa2, 0xde, 0x00, 0xaa, 0x00, 0xb9, 0x33, 0x56}
DEFINE_GUID!{IID_IDirectDraw4,
0x9c59509a, 0x39bd, 0x11d1, 0x8c, 0x4a, 0x00, 0xc0, 0x4f, 0xd9, 0x30, 0xc5}
DEFINE_GUID!{IID_IDirectDraw7,
0x15e65ec0, 0x3b9c, 0x11d2, 0xb9, 0x2f, 0x00, 0x60, 0x97, 0x97, 0xea, 0x5b}
DEFINE_GUID!{IID_IDirectDrawSurface,
0x6c14db81, 0xa733, 0x11ce, 0xa5, 0x21, 0x00, 0x20, 0xaf, 0x0b, 0xe5, 0x60}
DEFINE_GUID!{IID_IDirectDrawSurface2,
0x57805885, 0x6eec, 0x11cf, 0x94, 0x41, 0xa8, 0x23, 0x03, 0xc1, 0x0e, 0x27}
DEFINE_GUID!{IID_IDirectDrawSurface3,<|fim▁hole|> 0xda044e00, 0x69b2, 0x11d0, 0xa1, 0xd5, 0x00, 0xaa, 0x00, 0xb8, 0xdf, 0xbb}
DEFINE_GUID!{IID_IDirectDrawSurface4,
0x0b2b8630, 0xad35, 0x11d0, 0x8e, 0xa6, 0x00, 0x60, 0x97, 0x97, 0xea, 0x5b}
DEFINE_GUID!{IID_IDirectDrawSurface7,
0x06675a80, 0x3b9b, 0x11d2, 0xb9, 0x2f, 0x00, 0x60, 0x97, 0x97, 0xea, 0x5b}
DEFINE_GUID!{IID_IDirectDrawPalette,
0x6c14db84, 0xa733, 0x11ce, 0xa5, 0x21, 0x00, 0x20, 0xaf, 0x0b, 0xe5, 0x60}
DEFINE_GUID!{IID_IDirectDrawClipper,
0x6c14db85, 0xa733, 0x11ce, 0xa5, 0x21, 0x00, 0x20, 0xaf, 0x0b, 0xe5, 0x60}
DEFINE_GUID!{IID_IDirectDrawColorControl,
0x4b9f0ee0, 0x0d7e, 0x11d0, 0x9b, 0x06, 0x00, 0xa0, 0xc9, 0x03, 0xa3, 0xb8}
DEFINE_GUID!{IID_IDirectDrawGammaControl,
0x69c11c3e, 0xb46b, 0x11d1, 0xad, 0x7a, 0x00, 0xc0, 0x4f, 0xc2, 0x9b, 0x4e}<|fim▁end|> | |
<|file_name|>4-json-to-csv.py<|end_file_name|><|fim▁begin|># import the libraries that you need
import requests
import csv
# make a GET request to the OneSearch X-Service API
response = requests.get('http://onesearch.cuny.edu/PrimoWebServices'
'/xservice/search/brief?'
'&institution=KB'
'&query=any,contains,obama'
'&query=facet_rtype,exact,books'
'&loc=adaptor,primo_central_multiple_fe'
'&loc=local,scope:(KB,AL,CUNY_BEPRESS)'<|fim▁hole|>alldata = response.json()
# drill down into a smaller subset of the json
# and print this smaller bit of json
somedata = alldata['SEGMENTS']['JAGROOT']['RESULT']['FACETLIST']['FACET']\
[1]['FACET_VALUES']
print(somedata)
# open a file called mycsv.csv, then loop through the data
# and write to that file
with open('mycsv.csv', 'wb') as f:
writer = csv.writer(f)
for x in somedata:
writer.writerow([x['@KEY'], x['@VALUE']])<|fim▁end|> | '&json=true')
# take the JSON from the response
# and store it in a variable called alldata |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#<|fim▁hole|># Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone.endpoint_policy.core import * # noqa
from keystone.endpoint_policy import routers # noqa<|fim▁end|> | # http://www.apache.org/licenses/LICENSE-2.0
# |
<|file_name|>test_environment.py<|end_file_name|><|fim▁begin|># Copyright 2012-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Discover environment and server configuration, initialize PyMongo client."""
import os
import socket
import sys
from functools import wraps
from test.utils import create_user
from test.version import Version
from unittest import SkipTest
import pymongo.errors
HAVE_SSL = True
try:
import ssl
except ImportError:
HAVE_SSL = False
ssl = None
HAVE_TORNADO = True
try:
import tornado
except ImportError:
HAVE_TORNADO = False
tornado = None
HAVE_ASYNCIO = True
try:
import asyncio
except ImportError:
HAVE_ASYNCIO = False
asyncio = None
HAVE_AIOHTTP = True
try:
import aiohttp
except ImportError:<|fim▁hole|> HAVE_AIOHTTP = False
aiohttp = None
# Copied from PyMongo.
def partition_node(node):
"""Split a host:port string into (host, int(port)) pair."""
host = node
port = 27017
idx = node.rfind(":")
if idx != -1:
host, port = node[:idx], int(node[idx + 1 :])
if host.startswith("["):
host = host[1:-1]
return host, port
def connected(client):
"""Convenience, wait for a new PyMongo MongoClient to connect."""
client.admin.command("ping") # Force connection.
return client
# If these are set to the empty string, substitute None.
db_user = os.environ.get("DB_USER") or None
db_password = os.environ.get("DB_PASSWORD") or None
CERT_PATH = os.environ.get(
"CERT_DIR", os.path.join(os.path.dirname(os.path.realpath(__file__)), "certificates")
)
CLIENT_PEM = os.path.join(CERT_PATH, "client.pem")
CA_PEM = os.path.join(CERT_PATH, "ca.pem")
MONGODB_X509_USERNAME = "CN=client,OU=kerneluser,O=10Gen,L=New York City,ST=New York,C=US"
def is_server_resolvable():
"""Returns True if 'server' is resolvable."""
socket_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(1)
try:
socket.gethostbyname("server")
return True
except socket.error:
return False
finally:
socket.setdefaulttimeout(socket_timeout)
class TestEnvironment(object):
def __init__(self):
self.initialized = False
self.host = None
self.port = None
self.mongod_started_with_ssl = False
self.mongod_validates_client_cert = False
self.server_is_resolvable = is_server_resolvable()
self.sync_cx = None
self.is_standalone = False
self.is_mongos = False
self.is_replica_set = False
self.rs_name = None
self.w = 1
self.hosts = None
self.arbiters = None
self.primary = None
self.secondaries = None
self.v8 = False
self.auth = False
self.uri = None
self.rs_uri = None
self.version = None
self.sessions_enabled = False
self.fake_hostname_uri = None
self.server_status = None
def setup(self):
assert not self.initialized
self.setup_sync_cx()
self.setup_auth_and_uri()
self.setup_version()
self.setup_v8()
self.server_status = self.sync_cx.admin.command("serverStatus")
self.initialized = True
def setup_sync_cx(self):
"""Get a synchronous PyMongo MongoClient and determine SSL config."""
host = os.environ.get("DB_IP", "localhost")
port = int(os.environ.get("DB_PORT", 27017))
connectTimeoutMS = 100
serverSelectionTimeoutMS = 100
socketTimeoutMS = 10000
try:
client = connected(
pymongo.MongoClient(
host,
port,
username=db_user,
password=db_password,
directConnection=True,
connectTimeoutMS=connectTimeoutMS,
socketTimeoutMS=socketTimeoutMS,
serverSelectionTimeoutMS=serverSelectionTimeoutMS,
tlsCAFile=CA_PEM,
ssl=True,
)
)
self.mongod_started_with_ssl = True
except pymongo.errors.ServerSelectionTimeoutError:
try:
client = connected(
pymongo.MongoClient(
host,
port,
username=db_user,
password=db_password,
directConnection=True,
connectTimeoutMS=connectTimeoutMS,
socketTimeoutMS=socketTimeoutMS,
serverSelectionTimeoutMS=serverSelectionTimeoutMS,
tlsCAFile=CA_PEM,
tlsCertificateKeyFile=CLIENT_PEM,
)
)
self.mongod_started_with_ssl = True
self.mongod_validates_client_cert = True
except pymongo.errors.ServerSelectionTimeoutError:
client = connected(
pymongo.MongoClient(
host,
port,
username=db_user,
password=db_password,
directConnection=True,
connectTimeoutMS=connectTimeoutMS,
socketTimeoutMS=socketTimeoutMS,
serverSelectionTimeoutMS=serverSelectionTimeoutMS,
)
)
response = client.admin.command("ismaster")
self.sessions_enabled = "logicalSessionTimeoutMinutes" in response
self.is_mongos = response.get("msg") == "isdbgrid"
if "setName" in response:
self.is_replica_set = True
self.rs_name = str(response["setName"])
self.w = len(response["hosts"])
self.hosts = set([partition_node(h) for h in response["hosts"]])
host, port = self.primary = partition_node(response["primary"])
self.arbiters = set([partition_node(h) for h in response.get("arbiters", [])])
self.secondaries = [
partition_node(m)
for m in response["hosts"]
if m != self.primary and m not in self.arbiters
]
elif not self.is_mongos:
self.is_standalone = True
# Reconnect to found primary, without short timeouts.
if self.mongod_started_with_ssl:
client = connected(
pymongo.MongoClient(
host,
port,
username=db_user,
password=db_password,
directConnection=True,
tlsCAFile=CA_PEM,
tlsCertificateKeyFile=CLIENT_PEM,
)
)
else:
client = connected(
pymongo.MongoClient(
host,
port,
username=db_user,
password=db_password,
directConnection=True,
ssl=False,
)
)
self.sync_cx = client
self.host = host
self.port = port
def setup_auth_and_uri(self):
"""Set self.auth and self.uri."""
if db_user or db_password:
if not (db_user and db_password):
sys.stderr.write("You must set both DB_USER and DB_PASSWORD, or neither\n")
sys.exit(1)
self.auth = True
uri_template = "mongodb://%s:%s@%s:%s/admin"
self.uri = uri_template % (db_user, db_password, self.host, self.port)
# If the hostname 'server' is resolvable, this URI lets us use it
# to test SSL hostname validation with auth.
self.fake_hostname_uri = uri_template % (db_user, db_password, "server", self.port)
else:
self.uri = "mongodb://%s:%s/admin" % (self.host, self.port)
self.fake_hostname_uri = "mongodb://%s:%s/admin" % ("server", self.port)
if self.rs_name:
self.rs_uri = self.uri + "?replicaSet=" + self.rs_name
def setup_version(self):
"""Set self.version to the server's version."""
self.version = Version.from_client(self.sync_cx)
def setup_v8(self):
"""Determine if server is running SpiderMonkey or V8."""
if self.sync_cx.server_info().get("javascriptEngine") == "V8":
self.v8 = True
@property
def storage_engine(self):
try:
return self.server_status.get("storageEngine", {}).get("name")
except AttributeError:
# Raised if self.server_status is None.
return None
def supports_transactions(self):
if self.storage_engine == "mmapv1":
return False
if self.version.at_least(4, 1, 8):
return self.is_mongos or self.is_replica_set
if self.version.at_least(4, 0):
return self.is_replica_set
return False
def require(self, condition, msg, func=None):
def make_wrapper(f):
@wraps(f)
def wrap(*args, **kwargs):
if condition():
return f(*args, **kwargs)
raise SkipTest(msg)
return wrap
if func is None:
def decorate(f):
return make_wrapper(f)
return decorate
return make_wrapper(func)
def require_auth(self, func):
"""Run a test only if the server is started with auth."""
return self.require(lambda: self.auth, "Server must be start with auth", func=func)
def require_version_min(self, *ver):
"""Run a test only if the server version is at least ``version``."""
other_version = Version(*ver)
return self.require(
lambda: self.version >= other_version,
"Server version must be at least %s" % str(other_version),
)
def require_version_max(self, *ver):
"""Run a test only if the server version is at most ``version``."""
other_version = Version(*ver)
return self.require(
lambda: self.version <= other_version,
"Server version must be at most %s" % str(other_version),
)
def require_replica_set(self, func):
"""Run a test only if the client is connected to a replica set."""
return self.require(
lambda: self.is_replica_set, "Not connected to a replica set", func=func
)
def require_transactions(self, func):
"""Run a test only if the deployment might support transactions.
*Might* because this does not test the FCV.
"""
return self.require(self.supports_transactions, "Transactions are not supported", func=func)
def create_user(self, dbname, user, pwd=None, roles=None, **kwargs):
kwargs["writeConcern"] = {"w": self.w}
return create_user(self.sync_cx[dbname], user, pwd, roles, **kwargs)
def drop_user(self, dbname, user):
self.sync_cx[dbname].command("dropUser", user, writeConcern={"w": self.w})
env = TestEnvironment()<|fim▁end|> | |
<|file_name|>_googleappengine.py<|end_file_name|><|fim▁begin|>"""HTTP related handlers.
Note that some other HTTP handlers live in more specific modules: _auth.py,
_gzip.py, etc.
Copyright 2002-2006 John J Lee <[email protected]>
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD or ZPL 2.1 licenses (see the file
COPYING.txt included with the distribution).
"""
import time, htmlentitydefs, logging, \
fakesocket, urllib2, urllib, httplib, sgmllib
from urllib2 import URLError, HTTPError, BaseHandler
from cStringIO import StringIO
from _clientcookie import CookieJar
from _headersutil import is_html
from _html import unescape, unescape_charref
from _request import Request
from _response import closeable_response, response_seek_wrapper
import _rfc3986
import _sockettimeout
debug = logging.getLogger("mechanize").debug
debug_robots = logging.getLogger("mechanize.robots").debug
# monkeypatch urllib2.HTTPError to show URL
## def urllib2_str(self):
## return 'HTTP Error %s: %s (%s)' % (
## self.code, self.msg, self.geturl())
## urllib2.HTTPError.__str__ = urllib2_str
CHUNK = 1024 # size of chunks fed to HTML HEAD parser, in bytes
DEFAULT_ENCODING = 'latin-1'
#try:
# socket._fileobject("fake socket", close=True)
#except TypeError:
# python <= 2.4<|fim▁hole|>def create_readline_wrapper(fh):
return fakesocket._fileobject(fh, close=True)
# This adds "refresh" to the list of redirectables and provides a redirection
# algorithm that doesn't go into a loop in the presence of cookies
# (Python 2.4 has this new algorithm, 2.3 doesn't).
class HTTPRedirectHandler(BaseHandler):
# maximum number of redirections to any single URL
# this is needed because of the state that cookies introduce
max_repeats = 4
# maximum total number of redirections (regardless of URL) before
# assuming we're in a loop
max_redirections = 10
# Implementation notes:
# To avoid the server sending us into an infinite loop, the request
# object needs to track what URLs we have already seen. Do this by
# adding a handler-specific attribute to the Request object. The value
# of the dict is used to count the number of times the same URL has
# been visited. This is needed because visiting the same URL twice
# does not necessarily imply a loop, thanks to state introduced by
# cookies.
# Always unhandled redirection codes:
# 300 Multiple Choices: should not handle this here.
# 304 Not Modified: no need to handle here: only of interest to caches
# that do conditional GETs
# 305 Use Proxy: probably not worth dealing with here
# 306 Unused: what was this for in the previous versions of protocol??
def redirect_request(self, newurl, req, fp, code, msg, headers):
"""Return a Request or None in response to a redirect.
This is called by the http_error_30x methods when a redirection
response is received. If a redirection should take place, return a
new Request to allow http_error_30x to perform the redirect;
otherwise, return None to indicate that an HTTPError should be
raised.
"""
if code in (301, 302, 303, "refresh") or \
(code == 307 and not req.has_data()):
# Strictly (according to RFC 2616), 301 or 302 in response to
# a POST MUST NOT cause a redirection without confirmation
# from the user (of urllib2, in this case). In practice,
# essentially all clients do redirect in this case, so we do
# the same.
# XXX really refresh redirections should be visiting; tricky to
# fix, so this will wait until post-stable release
new = Request(newurl,
headers=req.headers,
origin_req_host=req.get_origin_req_host(),
unverifiable=True,
visit=False,
)
new._origin_req = getattr(req, "_origin_req", req)
return new
else:
raise HTTPError(req.get_full_url(), code, msg, headers, fp)
def http_error_302(self, req, fp, code, msg, headers):
# Some servers (incorrectly) return multiple Location headers
# (so probably same goes for URI). Use first header.
if headers.has_key('location'):
newurl = headers.getheaders('location')[0]
elif headers.has_key('uri'):
newurl = headers.getheaders('uri')[0]
else:
return
newurl = _rfc3986.clean_url(newurl, "latin-1")
newurl = _rfc3986.urljoin(req.get_full_url(), newurl)
# XXX Probably want to forget about the state of the current
# request, although that might interact poorly with other
# handlers that also use handler-specific request attributes
new = self.redirect_request(newurl, req, fp, code, msg, headers)
if new is None:
return
# loop detection
# .redirect_dict has a key url if url was previously visited.
if hasattr(req, 'redirect_dict'):
visited = new.redirect_dict = req.redirect_dict
if (visited.get(newurl, 0) >= self.max_repeats or
len(visited) >= self.max_redirections):
raise HTTPError(req.get_full_url(), code,
self.inf_msg + msg, headers, fp)
else:
visited = new.redirect_dict = req.redirect_dict = {}
visited[newurl] = visited.get(newurl, 0) + 1
# Don't close the fp until we are sure that we won't use it
# with HTTPError.
fp.read()
fp.close()
return self.parent.open(new)
http_error_301 = http_error_303 = http_error_307 = http_error_302
http_error_refresh = http_error_302
inf_msg = "The HTTP server returned a redirect error that would " \
"lead to an infinite loop.\n" \
"The last 30x error message was:\n"
# XXX would self.reset() work, instead of raising this exception?
class EndOfHeadError(Exception): pass
class AbstractHeadParser:
# only these elements are allowed in or before HEAD of document
head_elems = ("html", "head",
"title", "base",
"script", "style", "meta", "link", "object")
_entitydefs = htmlentitydefs.name2codepoint
_encoding = DEFAULT_ENCODING
def __init__(self):
self.http_equiv = []
def start_meta(self, attrs):
http_equiv = content = None
for key, value in attrs:
if key == "http-equiv":
http_equiv = self.unescape_attr_if_required(value)
elif key == "content":
content = self.unescape_attr_if_required(value)
if http_equiv is not None and content is not None:
self.http_equiv.append((http_equiv, content))
def end_head(self):
raise EndOfHeadError()
def handle_entityref(self, name):
#debug("%s", name)
self.handle_data(unescape(
'&%s;' % name, self._entitydefs, self._encoding))
def handle_charref(self, name):
#debug("%s", name)
self.handle_data(unescape_charref(name, self._encoding))
def unescape_attr(self, name):
#debug("%s", name)
return unescape(name, self._entitydefs, self._encoding)
def unescape_attrs(self, attrs):
#debug("%s", attrs)
escaped_attrs = {}
for key, val in attrs.items():
escaped_attrs[key] = self.unescape_attr(val)
return escaped_attrs
def unknown_entityref(self, ref):
self.handle_data("&%s;" % ref)
def unknown_charref(self, ref):
self.handle_data("&#%s;" % ref)
try:
import HTMLParser
except ImportError:
pass
else:
class XHTMLCompatibleHeadParser(AbstractHeadParser,
HTMLParser.HTMLParser):
def __init__(self):
HTMLParser.HTMLParser.__init__(self)
AbstractHeadParser.__init__(self)
def handle_starttag(self, tag, attrs):
if tag not in self.head_elems:
raise EndOfHeadError()
try:
method = getattr(self, 'start_' + tag)
except AttributeError:
try:
method = getattr(self, 'do_' + tag)
except AttributeError:
pass # unknown tag
else:
method(attrs)
else:
method(attrs)
def handle_endtag(self, tag):
if tag not in self.head_elems:
raise EndOfHeadError()
try:
method = getattr(self, 'end_' + tag)
except AttributeError:
pass # unknown tag
else:
method()
def unescape(self, name):
# Use the entitydefs passed into constructor, not
# HTMLParser.HTMLParser's entitydefs.
return self.unescape_attr(name)
def unescape_attr_if_required(self, name):
return name # HTMLParser.HTMLParser already did it
class HeadParser(AbstractHeadParser, sgmllib.SGMLParser):
def _not_called(self):
assert False
def __init__(self):
sgmllib.SGMLParser.__init__(self)
AbstractHeadParser.__init__(self)
def handle_starttag(self, tag, method, attrs):
if tag not in self.head_elems:
raise EndOfHeadError()
if tag == "meta":
method(attrs)
def unknown_starttag(self, tag, attrs):
self.handle_starttag(tag, self._not_called, attrs)
def handle_endtag(self, tag, method):
if tag in self.head_elems:
method()
else:
raise EndOfHeadError()
def unescape_attr_if_required(self, name):
return self.unescape_attr(name)
def parse_head(fileobj, parser):
"""Return a list of key, value pairs."""
while 1:
data = fileobj.read(CHUNK)
try:
parser.feed(data)
except EndOfHeadError:
break
if len(data) != CHUNK:
# this should only happen if there is no HTML body, or if
# CHUNK is big
break
return parser.http_equiv
class HTTPEquivProcessor(BaseHandler):
"""Append META HTTP-EQUIV headers to regular HTTP headers."""
handler_order = 300 # before handlers that look at HTTP headers
def __init__(self, head_parser_class=HeadParser,
i_want_broken_xhtml_support=False,
):
self.head_parser_class = head_parser_class
self._allow_xhtml = i_want_broken_xhtml_support
def http_response(self, request, response):
if not hasattr(response, "seek"):
response = response_seek_wrapper(response)
http_message = response.info()
url = response.geturl()
ct_hdrs = http_message.getheaders("content-type")
if is_html(ct_hdrs, url, self._allow_xhtml):
try:
try:
html_headers = parse_head(response,
self.head_parser_class())
finally:
response.seek(0)
except (HTMLParser.HTMLParseError,
sgmllib.SGMLParseError):
pass
else:
for hdr, val in html_headers:
# add a header
http_message.dict[hdr.lower()] = val
text = hdr + ": " + val
for line in text.split("\n"):
http_message.headers.append(line + "\n")
return response
https_response = http_response
class HTTPCookieProcessor(BaseHandler):
"""Handle HTTP cookies.
Public attributes:
cookiejar: CookieJar instance
"""
def __init__(self, cookiejar=None):
if cookiejar is None:
cookiejar = CookieJar()
self.cookiejar = cookiejar
def http_request(self, request):
self.cookiejar.add_cookie_header(request)
return request
def http_response(self, request, response):
self.cookiejar.extract_cookies(response, request)
return response
https_request = http_request
https_response = http_response
try:
import robotparser
except ImportError:
pass
else:
class MechanizeRobotFileParser(robotparser.RobotFileParser):
def __init__(self, url='', opener=None):
robotparser.RobotFileParser.__init__(self, url)
self._opener = opener
self._timeout = _sockettimeout._GLOBAL_DEFAULT_TIMEOUT
def set_opener(self, opener=None):
import _opener
if opener is None:
opener = _opener.OpenerDirector()
self._opener = opener
def set_timeout(self, timeout):
self._timeout = timeout
def read(self):
"""Reads the robots.txt URL and feeds it to the parser."""
if self._opener is None:
self.set_opener()
req = Request(self.url, unverifiable=True, visit=False,
timeout=self._timeout)
try:
f = self._opener.open(req)
except HTTPError, f:
pass
#except (IOError, socket.error, OSError), exc:
except (IOError, OSError), exc:
debug_robots("ignoring error opening %r: %s" %
(self.url, exc))
return
lines = []
line = f.readline()
while line:
lines.append(line.strip())
line = f.readline()
status = f.code
if status == 401 or status == 403:
self.disallow_all = True
debug_robots("disallow all")
elif status >= 400:
self.allow_all = True
debug_robots("allow all")
elif status == 200 and lines:
debug_robots("parse lines")
self.parse(lines)
class RobotExclusionError(urllib2.HTTPError):
def __init__(self, request, *args):
apply(urllib2.HTTPError.__init__, (self,)+args)
self.request = request
class HTTPRobotRulesProcessor(BaseHandler):
# before redirections, after everything else
handler_order = 800
try:
from httplib import HTTPMessage
except:
from mimetools import Message
http_response_class = Message
else:
http_response_class = HTTPMessage
def __init__(self, rfp_class=MechanizeRobotFileParser):
self.rfp_class = rfp_class
self.rfp = None
self._host = None
def http_request(self, request):
scheme = request.get_type()
if scheme not in ["http", "https"]:
# robots exclusion only applies to HTTP
return request
if request.get_selector() == "/robots.txt":
# /robots.txt is always OK to fetch
return request
host = request.get_host()
# robots.txt requests don't need to be allowed by robots.txt :-)
origin_req = getattr(request, "_origin_req", None)
if (origin_req is not None and
origin_req.get_selector() == "/robots.txt" and
origin_req.get_host() == host
):
return request
if host != self._host:
self.rfp = self.rfp_class()
try:
self.rfp.set_opener(self.parent)
except AttributeError:
debug("%r instance does not support set_opener" %
self.rfp.__class__)
self.rfp.set_url(scheme+"://"+host+"/robots.txt")
self.rfp.set_timeout(request.timeout)
self.rfp.read()
self._host = host
ua = request.get_header("User-agent", "")
if self.rfp.can_fetch(ua, request.get_full_url()):
return request
else:
# XXX This should really have raised URLError. Too late now...
msg = "request disallowed by robots.txt"
raise RobotExclusionError(
request,
request.get_full_url(),
403, msg,
self.http_response_class(StringIO()), StringIO(msg))
https_request = http_request
class HTTPRefererProcessor(BaseHandler):
"""Add Referer header to requests.
This only makes sense if you use each RefererProcessor for a single
chain of requests only (so, for example, if you use a single
HTTPRefererProcessor to fetch a series of URLs extracted from a single
page, this will break).
There's a proper implementation of this in mechanize.Browser.
"""
def __init__(self):
self.referer = None
def http_request(self, request):
if ((self.referer is not None) and
not request.has_header("Referer")):
request.add_unredirected_header("Referer", self.referer)
return request
def http_response(self, request, response):
self.referer = response.geturl()
return response
https_request = http_request
https_response = http_response
def clean_refresh_url(url):
# e.g. Firefox 1.5 does (something like) this
if ((url.startswith('"') and url.endswith('"')) or
(url.startswith("'") and url.endswith("'"))):
url = url[1:-1]
return _rfc3986.clean_url(url, "latin-1") # XXX encoding
def parse_refresh_header(refresh):
"""
>>> parse_refresh_header("1; url=http://example.com/")
(1.0, 'http://example.com/')
>>> parse_refresh_header("1; url='http://example.com/'")
(1.0, 'http://example.com/')
>>> parse_refresh_header("1")
(1.0, None)
>>> parse_refresh_header("blah")
Traceback (most recent call last):
ValueError: invalid literal for float(): blah
"""
ii = refresh.find(";")
if ii != -1:
pause, newurl_spec = float(refresh[:ii]), refresh[ii+1:]
jj = newurl_spec.find("=")
key = None
if jj != -1:
key, newurl = newurl_spec[:jj], newurl_spec[jj+1:]
newurl = clean_refresh_url(newurl)
if key is None or key.strip().lower() != "url":
raise ValueError()
else:
pause, newurl = float(refresh), None
return pause, newurl
class HTTPRefreshProcessor(BaseHandler):
"""Perform HTTP Refresh redirections.
Note that if a non-200 HTTP code has occurred (for example, a 30x
redirect), this processor will do nothing.
By default, only zero-time Refresh headers are redirected. Use the
max_time attribute / constructor argument to allow Refresh with longer
pauses. Use the honor_time attribute / constructor argument to control
whether the requested pause is honoured (with a time.sleep()) or
skipped in favour of immediate redirection.
Public attributes:
max_time: see above
honor_time: see above
"""
handler_order = 1000
def __init__(self, max_time=0, honor_time=True):
self.max_time = max_time
self.honor_time = honor_time
self._sleep = time.sleep
def http_response(self, request, response):
code, msg, hdrs = response.code, response.msg, response.info()
if code == 200 and hdrs.has_key("refresh"):
refresh = hdrs.getheaders("refresh")[0]
try:
pause, newurl = parse_refresh_header(refresh)
except ValueError:
debug("bad Refresh header: %r" % refresh)
return response
if newurl is None:
newurl = response.geturl()
if (self.max_time is None) or (pause <= self.max_time):
if pause > 1E-3 and self.honor_time:
self._sleep(pause)
hdrs["location"] = newurl
# hardcoded http is NOT a bug
response = self.parent.error(
"http", request, response,
"refresh", msg, hdrs)
else:
debug("Refresh header ignored: %r" % refresh)
return response
https_response = http_response
class HTTPErrorProcessor(BaseHandler):
"""Process HTTP error responses.
The purpose of this handler is to to allow other response processors a
look-in by removing the call to parent.error() from
AbstractHTTPHandler.
For non-200 error codes, this just passes the job on to the
Handler.<proto>_error_<code> methods, via the OpenerDirector.error
method. Eventually, urllib2.HTTPDefaultErrorHandler will raise an
HTTPError if no other handler handles the error.
"""
handler_order = 1000 # after all other processors
def http_response(self, request, response):
code, msg, hdrs = response.code, response.msg, response.info()
if code != 200:
# hardcoded http is NOT a bug
response = self.parent.error(
"http", request, response, code, msg, hdrs)
return response
https_response = http_response
class HTTPDefaultErrorHandler(BaseHandler):
def http_error_default(self, req, fp, code, msg, hdrs):
# why these error methods took the code, msg, headers args in the first
# place rather than a response object, I don't know, but to avoid
# multiple wrapping, we're discarding them
if isinstance(fp, urllib2.HTTPError):
response = fp
else:
response = urllib2.HTTPError(
req.get_full_url(), code, msg, hdrs, fp)
assert code == response.code
assert msg == response.msg
assert hdrs == response.hdrs
raise response
class AbstractHTTPHandler(BaseHandler):
def __init__(self, debuglevel=0):
self._debuglevel = debuglevel
def set_http_debuglevel(self, level):
self._debuglevel = level
def do_request_(self, request):
host = request.get_host()
if not host:
raise URLError('no host given')
if request.has_data(): # POST
data = request.get_data()
if not request.has_header('Content-type'):
request.add_unredirected_header(
'Content-type',
'application/x-www-form-urlencoded')
if not request.has_header('Content-length'):
request.add_unredirected_header(
'Content-length', '%d' % len(data))
scheme, sel = urllib.splittype(request.get_selector())
sel_host, sel_path = urllib.splithost(sel)
if not request.has_header('Host'):
request.add_unredirected_header('Host', sel_host or host)
for name, value in self.parent.addheaders:
name = name.capitalize()
if not request.has_header(name):
request.add_unredirected_header(name, value)
return request
def do_open(self, http_class, req):
"""Return an addinfourl object for the request, using http_class.
http_class must implement the HTTPConnection API from httplib.
The addinfourl return value is a file-like object. It also
has methods and attributes including:
- info(): return a mimetools.Message object for the headers
- geturl(): return the original request URL
- code: HTTP status code
"""
host_port = req.get_host()
if not host_port:
raise URLError('no host given')
try:
h = http_class(host_port, timeout=req.timeout)
except TypeError:
# Python < 2.6, no per-connection timeout support
h = http_class(host_port)
h.set_debuglevel(self._debuglevel)
headers = dict(req.headers)
headers.update(req.unredirected_hdrs)
# We want to make an HTTP/1.1 request, but the addinfourl
# class isn't prepared to deal with a persistent connection.
# It will try to read all remaining data from the socket,
# which will block while the server waits for the next request.
# So make sure the connection gets closed after the (only)
# request.
headers["Connection"] = "close"
headers = dict(
[(name.title(), val) for name, val in headers.items()])
try:
h.request(req.get_method(), req.get_selector(), req.data, headers)
r = h.getresponse()
#except socket.error, err: # XXX what error?
except (Exception), err: # XXX what error?
raise URLError(err)
# Pick apart the HTTPResponse object to get the addinfourl
# object initialized properly.
# Wrap the HTTPResponse object in socket's file object adapter
# for Windows. That adapter calls recv(), so delegate recv()
# to read(). This weird wrapping allows the returned object to
# have readline() and readlines() methods.
# XXX It might be better to extract the read buffering code
# out of socket._fileobject() and into a base class.
r.recv = r.read
fp = create_readline_wrapper(r)
resp = closeable_response(fp, r.msg, req.get_full_url(),
r.status, r.reason)
return resp
class HTTPHandler(AbstractHTTPHandler):
def http_open(self, req):
return self.do_open(httplib.HTTPConnection, req)
http_request = AbstractHTTPHandler.do_request_
if hasattr(httplib, 'HTTPS'):
class HTTPSConnectionFactory:
def __init__(self, key_file, cert_file):
self._key_file = key_file
self._cert_file = cert_file
def __call__(self, hostport):
return httplib.HTTPSConnection(
hostport,
key_file=self._key_file, cert_file=self._cert_file)
class HTTPSHandler(AbstractHTTPHandler):
def __init__(self, client_cert_manager=None):
AbstractHTTPHandler.__init__(self)
self.client_cert_manager = client_cert_manager
def https_open(self, req):
if self.client_cert_manager is not None:
key_file, cert_file = self.client_cert_manager.find_key_cert(
req.get_full_url())
conn_factory = HTTPSConnectionFactory(key_file, cert_file)
else:
conn_factory = httplib.HTTPSConnection
return self.do_open(conn_factory, req)
https_request = AbstractHTTPHandler.do_request_<|fim▁end|> | # create_readline_wrapper = socket._fileobject
#else: |
<|file_name|>check-import.ts<|end_file_name|><|fim▁begin|>import { Selector } from 'testcafe'
import { ROOT_URL, login } from '../e2e/utils'
// eslint-disable-next-line
fixture`imported data check`.beforeEach(async (t /*: TestController */) => {
await t.setNativeDialogHandler(() => true)
await t.navigateTo(`${ROOT_URL}/login`)
await login(t)
})
test('wait entry', async (t /*: TestController */) => {
await t<|fim▁hole|><|fim▁end|> | .expect(Selector('a').withText('Ask HN: single comment').exists)
.ok({ timeout: 20000 })
}) |
<|file_name|>backend_client.go<|end_file_name|><|fim▁begin|>package http
import (
gohttp "net/http"
"time"
)
func MakeBackendHttpClient(timeout time.Duration) *gohttp.Client {
return &gohttp.Client{
Timeout: time.Duration(timeout),
Transport: &gohttp.Transport{
DisableKeepAlives: true,<|fim▁hole|> },
}
}<|fim▁end|> | |
<|file_name|>localrepo.py<|end_file_name|><|fim▁begin|># localrepo.py - read/write repository class for mercurial
#
# Copyright 2005-2007 Matt Mackall <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from node import hex, nullid, short
from i18n import _
import peer, changegroup, subrepo, discovery, pushkey, obsolete, repoview
import changelog, dirstate, filelog, manifest, context, bookmarks, phases
import lock, transaction, store, encoding
import scmutil, util, extensions, hook, error, revset
import match as matchmod
import merge as mergemod
import tags as tagsmod
from lock import release
import weakref, errno, os, time, inspect
import branchmap
propertycache = util.propertycache
filecache = scmutil.filecache
class repofilecache(filecache):
"""All filecache usage on repo are done for logic that should be unfiltered
"""
def __get__(self, repo, type=None):
return super(repofilecache, self).__get__(repo.unfiltered(), type)
def __set__(self, repo, value):
return super(repofilecache, self).__set__(repo.unfiltered(), value)
def __delete__(self, repo):
return super(repofilecache, self).__delete__(repo.unfiltered())
class storecache(repofilecache):
"""filecache for files in the store"""
def join(self, obj, fname):
return obj.sjoin(fname)
class unfilteredpropertycache(propertycache):
"""propertycache that apply to unfiltered repo only"""
def __get__(self, repo, type=None):
return super(unfilteredpropertycache, self).__get__(repo.unfiltered())
class filteredpropertycache(propertycache):
"""propertycache that must take filtering in account"""
def cachevalue(self, obj, value):
object.__setattr__(obj, self.name, value)
def hasunfilteredcache(repo, name):
"""check if a repo has an unfilteredpropertycache value for <name>"""
return name in vars(repo.unfiltered())
def unfilteredmethod(orig):
"""decorate method that always need to be run on unfiltered version"""
def wrapper(repo, *args, **kwargs):
return orig(repo.unfiltered(), *args, **kwargs)
return wrapper
MODERNCAPS = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle'))
LEGACYCAPS = MODERNCAPS.union(set(['changegroupsubset']))
class localpeer(peer.peerrepository):
'''peer for a local repo; reflects only the most recent API'''
def __init__(self, repo, caps=MODERNCAPS):
peer.peerrepository.__init__(self)
self._repo = repo.filtered('served')
self.ui = repo.ui
self._caps = repo._restrictcapabilities(caps)
self.requirements = repo.requirements
self.supportedformats = repo.supportedformats
def close(self):
self._repo.close()
def _capabilities(self):
return self._caps
def local(self):
return self._repo
def canpush(self):
return True
def url(self):
return self._repo.url()
def lookup(self, key):
return self._repo.lookup(key)
def branchmap(self):
return self._repo.branchmap()
def heads(self):
return self._repo.heads()
def known(self, nodes):
return self._repo.known(nodes)
def getbundle(self, source, heads=None, common=None, bundlecaps=None):
return self._repo.getbundle(source, heads=heads, common=common,
bundlecaps=None)
# TODO We might want to move the next two calls into legacypeer and add
# unbundle instead.
def lock(self):
return self._repo.lock()
def addchangegroup(self, cg, source, url):
return self._repo.addchangegroup(cg, source, url)
def pushkey(self, namespace, key, old, new):
return self._repo.pushkey(namespace, key, old, new)
def listkeys(self, namespace):
return self._repo.listkeys(namespace)
def debugwireargs(self, one, two, three=None, four=None, five=None):
'''used to test argument passing over the wire'''
return "%s %s %s %s %s" % (one, two, three, four, five)
class locallegacypeer(localpeer):
'''peer extension which implements legacy methods too; used for tests with
restricted capabilities'''
def __init__(self, repo):
localpeer.__init__(self, repo, caps=LEGACYCAPS)
def branches(self, nodes):
return self._repo.branches(nodes)
def between(self, pairs):
return self._repo.between(pairs)
def changegroup(self, basenodes, source):
return self._repo.changegroup(basenodes, source)
def changegroupsubset(self, bases, heads, source):
return self._repo.changegroupsubset(bases, heads, source)
class localrepository(object):
supportedformats = set(('revlogv1', 'generaldelta'))
supported = supportedformats | set(('store', 'fncache', 'shared',
'dotencode'))
openerreqs = set(('revlogv1', 'generaldelta'))
requirements = ['revlogv1']
filtername = None
def _baserequirements(self, create):
return self.requirements[:]
def __init__(self, baseui, path=None, create=False):
self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
self.wopener = self.wvfs
self.root = self.wvfs.base
self.path = self.wvfs.join(".hg")
self.origroot = path
self.auditor = scmutil.pathauditor(self.root, self._checknested)
self.vfs = scmutil.vfs(self.path)
self.opener = self.vfs
self.baseui = baseui
self.ui = baseui.copy()
# A list of callback to shape the phase if no data were found.
# Callback are in the form: func(repo, roots) --> processed root.
# This list it to be filled by extension during repo setup
self._phasedefaults = []
try:
self.ui.readconfig(self.join("hgrc"), self.root)
extensions.loadall(self.ui)
except IOError:
pass
if not self.vfs.isdir():
if create:
if not self.wvfs.exists():
self.wvfs.makedirs()
self.vfs.makedir(notindexed=True)
requirements = self._baserequirements(create)
if self.ui.configbool('format', 'usestore', True):
self.vfs.mkdir("store")
requirements.append("store")
if self.ui.configbool('format', 'usefncache', True):
requirements.append("fncache")
if self.ui.configbool('format', 'dotencode', True):
requirements.append('dotencode')
# create an invalid changelog
self.vfs.append(
"00changelog.i",
'\0\0\0\2' # represents revlogv2
' dummy changelog to prevent using the old repo layout'
)
if self.ui.configbool('format', 'generaldelta', False):
requirements.append("generaldelta")
requirements = set(requirements)
else:
raise error.RepoError(_("repository %s not found") % path)
elif create:
raise error.RepoError(_("repository %s already exists") % path)
else:
try:
requirements = scmutil.readrequires(self.vfs, self.supported)
except IOError, inst:
if inst.errno != errno.ENOENT:
raise
requirements = set()
self.sharedpath = self.path
try:
vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
realpath=True)
s = vfs.base
if not vfs.exists():
raise error.RepoError(
_('.hg/sharedpath points to nonexistent directory %s') % s)
self.sharedpath = s
except IOError, inst:
if inst.errno != errno.ENOENT:
raise
self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
self.spath = self.store.path
self.svfs = self.store.vfs
self.sopener = self.svfs
self.sjoin = self.store.join
self.vfs.createmode = self.store.createmode
self._applyrequirements(requirements)
if create:
self._writerequirements()
self._branchcaches = {}
self.filterpats = {}
self._datafilters = {}
self._transref = self._lockref = self._wlockref = None
# A cache for various files under .hg/ that tracks file changes,
# (used by the filecache decorator)
#
# Maps a property name to its util.filecacheentry
self._filecache = {}
# hold sets of revision to be filtered
# should be cleared when something might have changed the filter value:
# - new changesets,
# - phase change,
# - new obsolescence marker,
# - working directory parent change,
# - bookmark changes
self.filteredrevcache = {}
def close(self):
pass
def _restrictcapabilities(self, caps):
return caps
def _applyrequirements(self, requirements):
self.requirements = requirements
self.sopener.options = dict((r, 1) for r in requirements
if r in self.openerreqs)
def _writerequirements(self):
reqfile = self.opener("requires", "w")
for r in sorted(self.requirements):
reqfile.write("%s\n" % r)
reqfile.close()
def _checknested(self, path):
"""Determine if path is a legal nested repository."""
if not path.startswith(self.root):
return False
subpath = path[len(self.root) + 1:]
normsubpath = util.pconvert(subpath)
# XXX: Checking against the current working copy is wrong in
# the sense that it can reject things like
#
# $ hg cat -r 10 sub/x.txt
#
# if sub/ is no longer a subrepository in the working copy
# parent revision.
#
# However, it can of course also allow things that would have
# been rejected before, such as the above cat command if sub/
# is a subrepository now, but was a normal directory before.
# The old path auditor would have rejected by mistake since it
# panics when it sees sub/.hg/.
#
# All in all, checking against the working copy seems sensible
# since we want to prevent access to nested repositories on
# the filesystem *now*.
ctx = self[None]
parts = util.splitpath(subpath)
while parts:
prefix = '/'.join(parts)
if prefix in ctx.substate:
if prefix == normsubpath:
return True
else:
sub = ctx.sub(prefix)
return sub.checknested(subpath[len(prefix) + 1:])
else:
parts.pop()
return False
def peer(self):
return localpeer(self) # not cached to avoid reference cycle
def unfiltered(self):
"""Return unfiltered version of the repository
Intended to be overwritten by filtered repo."""
return self
def filtered(self, name):
"""Return a filtered version of a repository"""
# build a new class with the mixin and the current class
# (possibly subclass of the repo)
class proxycls(repoview.repoview, self.unfiltered().__class__):
pass
return proxycls(self, name)
@repofilecache('bookmarks')
def _bookmarks(self):
return bookmarks.bmstore(self)
@repofilecache('bookmarks.current')
def _bookmarkcurrent(self):
return bookmarks.readcurrent(self)
def bookmarkheads(self, bookmark):
name = bookmark.split('@', 1)[0]
heads = []
for mark, n in self._bookmarks.iteritems():
if mark.split('@', 1)[0] == name:
heads.append(n)
return heads
@storecache('phaseroots')
def _phasecache(self):
return phases.phasecache(self, self._phasedefaults)
@storecache('obsstore')
def obsstore(self):
store = obsolete.obsstore(self.sopener)
if store and not obsolete._enabled:
# message is rare enough to not be translated
msg = 'obsolete feature not enabled but %i markers found!\n'
self.ui.warn(msg % len(list(store)))
return store
@storecache('00changelog.i')
def changelog(self):
c = changelog.changelog(self.sopener)
if 'HG_PENDING' in os.environ:
p = os.environ['HG_PENDING']
if p.startswith(self.root):
c.readpending('00changelog.i.a')
return c
@storecache('00manifest.i')
def manifest(self):
return manifest.manifest(self.sopener)
@repofilecache('dirstate')
def dirstate(self):
warned = [0]
def validate(node):
try:
self.changelog.rev(node)
return node
except error.LookupError:
if not warned[0]:
warned[0] = True
self.ui.warn(_("warning: ignoring unknown"
" working parent %s!\n") % short(node))
return nullid
return dirstate.dirstate(self.opener, self.ui, self.root, validate)
def __getitem__(self, changeid):
if changeid is None:
return context.workingctx(self)
return context.changectx(self, changeid)
def __contains__(self, changeid):
try:
return bool(self.lookup(changeid))
except error.RepoLookupError:
return False
def __nonzero__(self):
return True
def __len__(self):
return len(self.changelog)
def __iter__(self):
return iter(self.changelog)
def revs(self, expr, *args):
'''Return a list of revisions matching the given revset'''
expr = revset.formatspec(expr, *args)
m = revset.match(None, expr)
return [r for r in m(self, list(self))]
def set(self, expr, *args):
'''
Yield a context for each matching revision, after doing arg
replacement via revset.formatspec
'''
for r in self.revs(expr, *args):
yield self[r]
def url(self):
return 'file:' + self.root
def hook(self, name, throw=False, **args):
return hook.hook(self.ui, self, name, throw, **args)
@unfilteredmethod
def _tag(self, names, node, message, local, user, date, extra={}):
if isinstance(names, str):
names = (names,)
branches = self.branchmap()
for name in names:
self.hook('pretag', throw=True, node=hex(node), tag=name,
local=local)
if name in branches:
self.ui.warn(_("warning: tag %s conflicts with existing"
" branch name\n") % name)
def writetags(fp, names, munge, prevtags):
fp.seek(0, 2)
if prevtags and prevtags[-1] != '\n':
fp.write('\n')
for name in names:
m = munge and munge(name) or name
if (self._tagscache.tagtypes and
name in self._tagscache.tagtypes):
old = self.tags().get(name, nullid)
fp.write('%s %s\n' % (hex(old), m))
fp.write('%s %s\n' % (hex(node), m))
fp.close()
prevtags = ''
if local:
try:
fp = self.opener('localtags', 'r+')
except IOError:
fp = self.opener('localtags', 'a')
else:
prevtags = fp.read()
# local tags are stored in the current charset
writetags(fp, names, None, prevtags)
for name in names:
self.hook('tag', node=hex(node), tag=name, local=local)
return
try:
fp = self.wfile('.hgtags', 'rb+')
except IOError, e:
if e.errno != errno.ENOENT:
raise
fp = self.wfile('.hgtags', 'ab')
else:
prevtags = fp.read()
# committed tags are stored in UTF-8
writetags(fp, names, encoding.fromlocal, prevtags)
fp.close()
self.invalidatecaches()
if '.hgtags' not in self.dirstate:
self[None].add(['.hgtags'])
m = matchmod.exact(self.root, '', ['.hgtags'])
tagnode = self.commit(message, user, date, extra=extra, match=m)
for name in names:
self.hook('tag', node=hex(node), tag=name, local=local)
return tagnode
def tag(self, names, node, message, local, user, date):
'''tag a revision with one or more symbolic names.
names is a list of strings or, when adding a single tag, names may be a
string.
if local is True, the tags are stored in a per-repository file.
otherwise, they are stored in the .hgtags file, and a new
changeset is committed with the change.
keyword arguments:
local: whether to store tags in non-version-controlled file
(default False)
message: commit message to use if committing
user: name of user to use if committing
date: date tuple to use if committing'''
if not local:
for x in self.status()[:5]:
if '.hgtags' in x:
raise util.Abort(_('working copy of .hgtags is changed '
'(please commit .hgtags manually)'))
self.tags() # instantiate the cache
self._tag(names, node, message, local, user, date)
@filteredpropertycache
def _tagscache(self):
'''Returns a tagscache object that contains various tags related
caches.'''
# This simplifies its cache management by having one decorated
# function (this one) and the rest simply fetch things from it.
class tagscache(object):
def __init__(self):
# These two define the set of tags for this repository. tags
# maps tag name to node; tagtypes maps tag name to 'global' or
# 'local'. (Global tags are defined by .hgtags across all
# heads, and local tags are defined in .hg/localtags.)
# They constitute the in-memory cache of tags.
self.tags = self.tagtypes = None
self.nodetagscache = self.tagslist = None
cache = tagscache()
cache.tags, cache.tagtypes = self._findtags()
return cache
def tags(self):
'''return a mapping of tag to node'''
t = {}
if self.changelog.filteredrevs:
tags, tt = self._findtags()
else:
tags = self._tagscache.tags
for k, v in tags.iteritems():
try:
# ignore tags to unknown nodes
self.changelog.rev(v)
t[k] = v
except (error.LookupError, ValueError):
pass
return t
def _findtags(self):
'''Do the hard work of finding tags. Return a pair of dicts
(tags, tagtypes) where tags maps tag name to node, and tagtypes
maps tag name to a string like \'global\' or \'local\'.
Subclasses or extensions are free to add their own tags, but
should be aware that the returned dicts will be retained for the
duration of the localrepo object.'''
# XXX what tagtype should subclasses/extensions use? Currently
# mq and bookmarks add tags, but do not set the tagtype at all.
# Should each extension invent its own tag type? Should there
# be one tagtype for all such "virtual" tags? Or is the status
# quo fine?
alltags = {} # map tag name to (node, hist)
tagtypes = {}
tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
# Build the return dicts. Have to re-encode tag names because
# the tags module always uses UTF-8 (in order not to lose info
# writing to the cache), but the rest of Mercurial wants them in
# local encoding.
tags = {}
for (name, (node, hist)) in alltags.iteritems():
if node != nullid:
tags[encoding.tolocal(name)] = node
tags['tip'] = self.changelog.tip()
tagtypes = dict([(encoding.tolocal(name), value)
for (name, value) in tagtypes.iteritems()])
return (tags, tagtypes)
def tagtype(self, tagname):
'''
return the type of the given tag. result can be:
'local' : a local tag
'global' : a global tag
None : tag does not exist
'''
return self._tagscache.tagtypes.get(tagname)
def tagslist(self):
'''return a list of tags ordered by revision'''
if not self._tagscache.tagslist:
l = []
for t, n in self.tags().iteritems():
r = self.changelog.rev(n)
l.append((r, t, n))
self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
return self._tagscache.tagslist
def nodetags(self, node):
'''return the tags associated with a node'''
if not self._tagscache.nodetagscache:
nodetagscache = {}
for t, n in self._tagscache.tags.iteritems():
nodetagscache.setdefault(n, []).append(t)
for tags in nodetagscache.itervalues():
tags.sort()
self._tagscache.nodetagscache = nodetagscache
return self._tagscache.nodetagscache.get(node, [])
def nodebookmarks(self, node):
marks = []
for bookmark, n in self._bookmarks.iteritems():
if n == node:
marks.append(bookmark)
return sorted(marks)
def branchmap(self):
'''returns a dictionary {branch: [branchheads]}'''
branchmap.updatecache(self)
return self._branchcaches[self.filtername]
def _branchtip(self, heads):
'''return the tipmost branch head in heads'''
tip = heads[-1]
for h in reversed(heads):
if not self[h].closesbranch():
tip = h
break
return tip
def branchtip(self, branch):
'''return the tip node for a given branch'''
if branch not in self.branchmap():
raise error.RepoLookupError(_("unknown branch '%s'") % branch)
return self._branchtip(self.branchmap()[branch])
def branchtags(self):
'''return a dict where branch names map to the tipmost head of
the branch, open heads come before closed'''
bt = {}
for bn, heads in self.branchmap().iteritems():
bt[bn] = self._branchtip(heads)
return bt
def lookup(self, key):
return self[key].node()
def lookupbranch(self, key, remote=None):
repo = remote or self
if key in repo.branchmap():
return key
repo = (remote and remote.local()) and remote or self
return repo[key].branch()
def known(self, nodes):
nm = self.changelog.nodemap
pc = self._phasecache
result = []
for n in nodes:
r = nm.get(n)
resp = not (r is None or pc.phase(self, r) >= phases.secret)
result.append(resp)
return result
def local(self):
return self
def cancopy(self):<|fim▁hole|> def join(self, f):
return os.path.join(self.path, f)
def wjoin(self, f):
return os.path.join(self.root, f)
def file(self, f):
if f[0] == '/':
f = f[1:]
return filelog.filelog(self.sopener, f)
def changectx(self, changeid):
return self[changeid]
def parents(self, changeid=None):
'''get list of changectxs for parents of changeid'''
return self[changeid].parents()
def setparents(self, p1, p2=nullid):
copies = self.dirstate.setparents(p1, p2)
pctx = self[p1]
if copies:
# Adjust copy records, the dirstate cannot do it, it
# requires access to parents manifests. Preserve them
# only for entries added to first parent.
for f in copies:
if f not in pctx and copies[f] in pctx:
self.dirstate.copy(copies[f], f)
if p2 == nullid:
for f, s in sorted(self.dirstate.copies().items()):
if f not in pctx and s not in pctx:
self.dirstate.copy(None, f)
def filectx(self, path, changeid=None, fileid=None):
"""changeid can be a changeset revision, node, or tag.
fileid can be a file revision or node."""
return context.filectx(self, path, changeid, fileid)
def getcwd(self):
return self.dirstate.getcwd()
def pathto(self, f, cwd=None):
return self.dirstate.pathto(f, cwd)
def wfile(self, f, mode='r'):
return self.wopener(f, mode)
def _link(self, f):
return self.wvfs.islink(f)
def _loadfilter(self, filter):
if filter not in self.filterpats:
l = []
for pat, cmd in self.ui.configitems(filter):
if cmd == '!':
continue
mf = matchmod.match(self.root, '', [pat])
fn = None
params = cmd
for name, filterfn in self._datafilters.iteritems():
if cmd.startswith(name):
fn = filterfn
params = cmd[len(name):].lstrip()
break
if not fn:
fn = lambda s, c, **kwargs: util.filter(s, c)
# Wrap old filters not supporting keyword arguments
if not inspect.getargspec(fn)[2]:
oldfn = fn
fn = lambda s, c, **kwargs: oldfn(s, c)
l.append((mf, fn, params))
self.filterpats[filter] = l
return self.filterpats[filter]
def _filter(self, filterpats, filename, data):
for mf, fn, cmd in filterpats:
if mf(filename):
self.ui.debug("filtering %s through %s\n" % (filename, cmd))
data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
break
return data
@unfilteredpropertycache
def _encodefilterpats(self):
return self._loadfilter('encode')
@unfilteredpropertycache
def _decodefilterpats(self):
return self._loadfilter('decode')
def adddatafilter(self, name, filter):
self._datafilters[name] = filter
def wread(self, filename):
if self._link(filename):
data = self.wvfs.readlink(filename)
else:
data = self.wopener.read(filename)
return self._filter(self._encodefilterpats, filename, data)
def wwrite(self, filename, data, flags):
data = self._filter(self._decodefilterpats, filename, data)
if 'l' in flags:
self.wopener.symlink(data, filename)
else:
self.wopener.write(filename, data)
if 'x' in flags:
self.wvfs.setflags(filename, False, True)
def wwritedata(self, filename, data):
return self._filter(self._decodefilterpats, filename, data)
def transaction(self, desc):
tr = self._transref and self._transref() or None
if tr and tr.running():
return tr.nest()
# abort here if the journal already exists
if self.svfs.exists("journal"):
raise error.RepoError(
_("abandoned transaction found - run hg recover"))
self._writejournal(desc)
renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
tr = transaction.transaction(self.ui.warn, self.sopener,
self.sjoin("journal"),
aftertrans(renames),
self.store.createmode)
self._transref = weakref.ref(tr)
return tr
def _journalfiles(self):
return ((self.svfs, 'journal'),
(self.vfs, 'journal.dirstate'),
(self.vfs, 'journal.branch'),
(self.vfs, 'journal.desc'),
(self.vfs, 'journal.bookmarks'),
(self.svfs, 'journal.phaseroots'))
def undofiles(self):
return [vfs.join(undoname(x)) for vfs, x in self._journalfiles()]
def _writejournal(self, desc):
self.opener.write("journal.dirstate",
self.opener.tryread("dirstate"))
self.opener.write("journal.branch",
encoding.fromlocal(self.dirstate.branch()))
self.opener.write("journal.desc",
"%d\n%s\n" % (len(self), desc))
self.opener.write("journal.bookmarks",
self.opener.tryread("bookmarks"))
self.sopener.write("journal.phaseroots",
self.sopener.tryread("phaseroots"))
def recover(self):
lock = self.lock()
try:
if self.svfs.exists("journal"):
self.ui.status(_("rolling back interrupted transaction\n"))
transaction.rollback(self.sopener, self.sjoin("journal"),
self.ui.warn)
self.invalidate()
return True
else:
self.ui.warn(_("no interrupted transaction available\n"))
return False
finally:
lock.release()
def rollback(self, dryrun=False, force=False):
wlock = lock = None
try:
wlock = self.wlock()
lock = self.lock()
if self.svfs.exists("undo"):
return self._rollback(dryrun, force)
else:
self.ui.warn(_("no rollback information available\n"))
return 1
finally:
release(lock, wlock)
@unfilteredmethod # Until we get smarter cache management
def _rollback(self, dryrun, force):
ui = self.ui
try:
args = self.opener.read('undo.desc').splitlines()
(oldlen, desc, detail) = (int(args[0]), args[1], None)
if len(args) >= 3:
detail = args[2]
oldtip = oldlen - 1
if detail and ui.verbose:
msg = (_('repository tip rolled back to revision %s'
' (undo %s: %s)\n')
% (oldtip, desc, detail))
else:
msg = (_('repository tip rolled back to revision %s'
' (undo %s)\n')
% (oldtip, desc))
except IOError:
msg = _('rolling back unknown transaction\n')
desc = None
if not force and self['.'] != self['tip'] and desc == 'commit':
raise util.Abort(
_('rollback of last commit while not checked out '
'may lose data'), hint=_('use -f to force'))
ui.status(msg)
if dryrun:
return 0
parents = self.dirstate.parents()
self.destroying()
transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
if self.vfs.exists('undo.bookmarks'):
self.vfs.rename('undo.bookmarks', 'bookmarks')
if self.svfs.exists('undo.phaseroots'):
self.svfs.rename('undo.phaseroots', 'phaseroots')
self.invalidate()
parentgone = (parents[0] not in self.changelog.nodemap or
parents[1] not in self.changelog.nodemap)
if parentgone:
self.vfs.rename('undo.dirstate', 'dirstate')
try:
branch = self.opener.read('undo.branch')
self.dirstate.setbranch(encoding.tolocal(branch))
except IOError:
ui.warn(_('named branch could not be reset: '
'current branch is still \'%s\'\n')
% self.dirstate.branch())
self.dirstate.invalidate()
parents = tuple([p.rev() for p in self.parents()])
if len(parents) > 1:
ui.status(_('working directory now based on '
'revisions %d and %d\n') % parents)
else:
ui.status(_('working directory now based on '
'revision %d\n') % parents)
# TODO: if we know which new heads may result from this rollback, pass
# them to destroy(), which will prevent the branchhead cache from being
# invalidated.
self.destroyed()
return 0
def invalidatecaches(self):
if '_tagscache' in vars(self):
# can't use delattr on proxy
del self.__dict__['_tagscache']
self.unfiltered()._branchcaches.clear()
self.invalidatevolatilesets()
def invalidatevolatilesets(self):
self.filteredrevcache.clear()
obsolete.clearobscaches(self)
def invalidatedirstate(self):
'''Invalidates the dirstate, causing the next call to dirstate
to check if it was modified since the last time it was read,
rereading it if it has.
This is different to dirstate.invalidate() that it doesn't always
rereads the dirstate. Use dirstate.invalidate() if you want to
explicitly read the dirstate again (i.e. restoring it to a previous
known good state).'''
if hasunfilteredcache(self, 'dirstate'):
for k in self.dirstate._filecache:
try:
delattr(self.dirstate, k)
except AttributeError:
pass
delattr(self.unfiltered(), 'dirstate')
def invalidate(self):
unfiltered = self.unfiltered() # all file caches are stored unfiltered
for k in self._filecache:
# dirstate is invalidated separately in invalidatedirstate()
if k == 'dirstate':
continue
try:
delattr(unfiltered, k)
except AttributeError:
pass
self.invalidatecaches()
def _lock(self, lockname, wait, releasefn, acquirefn, desc):
try:
l = lock.lock(lockname, 0, releasefn, desc=desc)
except error.LockHeld, inst:
if not wait:
raise
self.ui.warn(_("waiting for lock on %s held by %r\n") %
(desc, inst.locker))
# default to 600 seconds timeout
l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
releasefn, desc=desc)
if acquirefn:
acquirefn()
return l
def _afterlock(self, callback):
"""add a callback to the current repository lock.
The callback will be executed on lock release."""
l = self._lockref and self._lockref()
if l:
l.postrelease.append(callback)
else:
callback()
def lock(self, wait=True):
'''Lock the repository store (.hg/store) and return a weak reference
to the lock. Use this before modifying the store (e.g. committing or
stripping). If you are opening a transaction, get a lock as well.)'''
l = self._lockref and self._lockref()
if l is not None and l.held:
l.lock()
return l
def unlock():
self.store.write()
if hasunfilteredcache(self, '_phasecache'):
self._phasecache.write()
for k, ce in self._filecache.items():
if k == 'dirstate' or k not in self.__dict__:
continue
ce.refresh()
l = self._lock(self.sjoin("lock"), wait, unlock,
self.invalidate, _('repository %s') % self.origroot)
self._lockref = weakref.ref(l)
return l
def wlock(self, wait=True):
'''Lock the non-store parts of the repository (everything under
.hg except .hg/store) and return a weak reference to the lock.
Use this before modifying files in .hg.'''
l = self._wlockref and self._wlockref()
if l is not None and l.held:
l.lock()
return l
def unlock():
self.dirstate.write()
self._filecache['dirstate'].refresh()
l = self._lock(self.join("wlock"), wait, unlock,
self.invalidatedirstate, _('working directory of %s') %
self.origroot)
self._wlockref = weakref.ref(l)
return l
def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
"""
commit an individual file as part of a larger transaction
"""
fname = fctx.path()
text = fctx.data()
flog = self.file(fname)
fparent1 = manifest1.get(fname, nullid)
fparent2 = fparent2o = manifest2.get(fname, nullid)
meta = {}
copy = fctx.renamed()
if copy and copy[0] != fname:
# Mark the new revision of this file as a copy of another
# file. This copy data will effectively act as a parent
# of this new revision. If this is a merge, the first
# parent will be the nullid (meaning "look up the copy data")
# and the second one will be the other parent. For example:
#
# 0 --- 1 --- 3 rev1 changes file foo
# \ / rev2 renames foo to bar and changes it
# \- 2 -/ rev3 should have bar with all changes and
# should record that bar descends from
# bar in rev2 and foo in rev1
#
# this allows this merge to succeed:
#
# 0 --- 1 --- 3 rev4 reverts the content change from rev2
# \ / merging rev3 and rev4 should use bar@rev2
# \- 2 --- 4 as the merge base
#
cfname = copy[0]
crev = manifest1.get(cfname)
newfparent = fparent2
if manifest2: # branch merge
if fparent2 == nullid or crev is None: # copied on remote side
if cfname in manifest2:
crev = manifest2[cfname]
newfparent = fparent1
# find source in nearest ancestor if we've lost track
if not crev:
self.ui.debug(" %s: searching for copy revision for %s\n" %
(fname, cfname))
for ancestor in self[None].ancestors():
if cfname in ancestor:
crev = ancestor[cfname].filenode()
break
if crev:
self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
meta["copy"] = cfname
meta["copyrev"] = hex(crev)
fparent1, fparent2 = nullid, newfparent
else:
self.ui.warn(_("warning: can't find ancestor for '%s' "
"copied from '%s'!\n") % (fname, cfname))
elif fparent2 != nullid:
# is one parent an ancestor of the other?
fparentancestor = flog.ancestor(fparent1, fparent2)
if fparentancestor == fparent1:
fparent1, fparent2 = fparent2, nullid
elif fparentancestor == fparent2:
fparent2 = nullid
# is the file changed?
if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
changelist.append(fname)
return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
# are just the flags changed during merge?
if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
changelist.append(fname)
return fparent1
@unfilteredmethod
def commit(self, text="", user=None, date=None, match=None, force=False,
editor=False, extra={}):
"""Add a new revision to current repository.
Revision information is gathered from the working directory,
match can be used to filter the committed files. If editor is
supplied, it is called to get a commit message.
"""
def fail(f, msg):
raise util.Abort('%s: %s' % (f, msg))
if not match:
match = matchmod.always(self.root, '')
if not force:
vdirs = []
match.explicitdir = vdirs.append
match.bad = fail
wlock = self.wlock()
try:
wctx = self[None]
merge = len(wctx.parents()) > 1
if (not force and merge and match and
(match.files() or match.anypats())):
raise util.Abort(_('cannot partially commit a merge '
'(do not specify files or patterns)'))
changes = self.status(match=match, clean=force)
if force:
changes[0].extend(changes[6]) # mq may commit unchanged files
# check subrepos
subs = []
commitsubs = set()
newstate = wctx.substate.copy()
# only manage subrepos and .hgsubstate if .hgsub is present
if '.hgsub' in wctx:
# we'll decide whether to track this ourselves, thanks
if '.hgsubstate' in changes[0]:
changes[0].remove('.hgsubstate')
if '.hgsubstate' in changes[2]:
changes[2].remove('.hgsubstate')
# compare current state to last committed state
# build new substate based on last committed state
oldstate = wctx.p1().substate
for s in sorted(newstate.keys()):
if not match(s):
# ignore working copy, use old state if present
if s in oldstate:
newstate[s] = oldstate[s]
continue
if not force:
raise util.Abort(
_("commit with new subrepo %s excluded") % s)
if wctx.sub(s).dirty(True):
if not self.ui.configbool('ui', 'commitsubrepos'):
raise util.Abort(
_("uncommitted changes in subrepo %s") % s,
hint=_("use --subrepos for recursive commit"))
subs.append(s)
commitsubs.add(s)
else:
bs = wctx.sub(s).basestate()
newstate[s] = (newstate[s][0], bs, newstate[s][2])
if oldstate.get(s, (None, None, None))[1] != bs:
subs.append(s)
# check for removed subrepos
for p in wctx.parents():
r = [s for s in p.substate if s not in newstate]
subs += [s for s in r if match(s)]
if subs:
if (not match('.hgsub') and
'.hgsub' in (wctx.modified() + wctx.added())):
raise util.Abort(
_("can't commit subrepos without .hgsub"))
changes[0].insert(0, '.hgsubstate')
elif '.hgsub' in changes[2]:
# clean up .hgsubstate when .hgsub is removed
if ('.hgsubstate' in wctx and
'.hgsubstate' not in changes[0] + changes[1] + changes[2]):
changes[2].insert(0, '.hgsubstate')
# make sure all explicit patterns are matched
if not force and match.files():
matched = set(changes[0] + changes[1] + changes[2])
for f in match.files():
f = self.dirstate.normalize(f)
if f == '.' or f in matched or f in wctx.substate:
continue
if f in changes[3]: # missing
fail(f, _('file not found!'))
if f in vdirs: # visited directory
d = f + '/'
for mf in matched:
if mf.startswith(d):
break
else:
fail(f, _("no match under directory!"))
elif f not in self.dirstate:
fail(f, _("file not tracked!"))
cctx = context.workingctx(self, text, user, date, extra, changes)
if (not force and not extra.get("close") and not merge
and not cctx.files()
and wctx.branch() == wctx.p1().branch()):
return None
if merge and cctx.deleted():
raise util.Abort(_("cannot commit merge with missing files"))
ms = mergemod.mergestate(self)
for f in changes[0]:
if f in ms and ms[f] == 'u':
raise util.Abort(_("unresolved merge conflicts "
"(see hg help resolve)"))
if editor:
cctx._text = editor(self, cctx, subs)
edited = (text != cctx._text)
# commit subs and write new state
if subs:
for s in sorted(commitsubs):
sub = wctx.sub(s)
self.ui.status(_('committing subrepository %s\n') %
subrepo.subrelpath(sub))
sr = sub.commit(cctx._text, user, date)
newstate[s] = (newstate[s][0], sr)
subrepo.writestate(self, newstate)
# Save commit message in case this transaction gets rolled back
# (e.g. by a pretxncommit hook). Leave the content alone on
# the assumption that the user will use the same editor again.
msgfn = self.savecommitmessage(cctx._text)
p1, p2 = self.dirstate.parents()
hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
try:
self.hook("precommit", throw=True, parent1=hookp1,
parent2=hookp2)
ret = self.commitctx(cctx, True)
except: # re-raises
if edited:
self.ui.write(
_('note: commit message saved in %s\n') % msgfn)
raise
# update bookmarks, dirstate and mergestate
bookmarks.update(self, [p1, p2], ret)
cctx.markcommitted(ret)
ms.reset()
finally:
wlock.release()
def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
self.hook("commit", node=node, parent1=parent1, parent2=parent2)
self._afterlock(commithook)
return ret
@unfilteredmethod
def commitctx(self, ctx, error=False):
"""Add a new revision to current repository.
Revision information is passed via the context argument.
"""
tr = lock = None
removed = list(ctx.removed())
p1, p2 = ctx.p1(), ctx.p2()
user = ctx.user()
lock = self.lock()
try:
tr = self.transaction("commit")
trp = weakref.proxy(tr)
if ctx.files():
m1 = p1.manifest().copy()
m2 = p2.manifest()
# check in files
new = {}
changed = []
linkrev = len(self)
for f in sorted(ctx.modified() + ctx.added()):
self.ui.note(f + "\n")
try:
fctx = ctx[f]
new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
changed)
m1.set(f, fctx.flags())
except OSError, inst:
self.ui.warn(_("trouble committing %s!\n") % f)
raise
except IOError, inst:
errcode = getattr(inst, 'errno', errno.ENOENT)
if error or errcode and errcode != errno.ENOENT:
self.ui.warn(_("trouble committing %s!\n") % f)
raise
else:
removed.append(f)
# update manifest
m1.update(new)
removed = [f for f in sorted(removed) if f in m1 or f in m2]
drop = [f for f in removed if f in m1]
for f in drop:
del m1[f]
mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
p2.manifestnode(), (new, drop))
files = changed + removed
else:
mn = p1.manifestnode()
files = []
# update changelog
self.changelog.delayupdate()
n = self.changelog.add(mn, files, ctx.description(),
trp, p1.node(), p2.node(),
user, ctx.date(), ctx.extra().copy())
p = lambda: self.changelog.writepending() and self.root or ""
xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
parent2=xp2, pending=p)
self.changelog.finalize(trp)
# set the new commit is proper phase
targetphase = phases.newcommitphase(self.ui)
if targetphase:
# retract boundary do not alter parent changeset.
# if a parent have higher the resulting phase will
# be compliant anyway
#
# if minimal phase was 0 we don't need to retract anything
phases.retractboundary(self, targetphase, [n])
tr.close()
branchmap.updatecache(self.filtered('served'))
return n
finally:
if tr:
tr.release()
lock.release()
@unfilteredmethod
def destroying(self):
'''Inform the repository that nodes are about to be destroyed.
Intended for use by strip and rollback, so there's a common
place for anything that has to be done before destroying history.
This is mostly useful for saving state that is in memory and waiting
to be flushed when the current lock is released. Because a call to
destroyed is imminent, the repo will be invalidated causing those
changes to stay in memory (waiting for the next unlock), or vanish
completely.
'''
# When using the same lock to commit and strip, the phasecache is left
# dirty after committing. Then when we strip, the repo is invalidated,
# causing those changes to disappear.
if '_phasecache' in vars(self):
self._phasecache.write()
@unfilteredmethod
def destroyed(self):
'''Inform the repository that nodes have been destroyed.
Intended for use by strip and rollback, so there's a common
place for anything that has to be done after destroying history.
'''
# When one tries to:
# 1) destroy nodes thus calling this method (e.g. strip)
# 2) use phasecache somewhere (e.g. commit)
#
# then 2) will fail because the phasecache contains nodes that were
# removed. We can either remove phasecache from the filecache,
# causing it to reload next time it is accessed, or simply filter
# the removed nodes now and write the updated cache.
self._phasecache.filterunknown(self)
self._phasecache.write()
# update the 'served' branch cache to help read only server process
# Thanks to branchcache collaboration this is done from the nearest
# filtered subset and it is expected to be fast.
branchmap.updatecache(self.filtered('served'))
# Ensure the persistent tag cache is updated. Doing it now
# means that the tag cache only has to worry about destroyed
# heads immediately after a strip/rollback. That in turn
# guarantees that "cachetip == currenttip" (comparing both rev
# and node) always means no nodes have been added or destroyed.
# XXX this is suboptimal when qrefresh'ing: we strip the current
# head, refresh the tag cache, then immediately add a new head.
# But I think doing it this way is necessary for the "instant
# tag cache retrieval" case to work.
self.invalidate()
def walk(self, match, node=None):
'''
walk recursively through the directory tree or a given
changeset, finding all files matched by the match
function
'''
return self[node].walk(match)
def status(self, node1='.', node2=None, match=None,
ignored=False, clean=False, unknown=False,
listsubrepos=False):
"""return status of files between two nodes or node and working
directory.
If node1 is None, use the first dirstate parent instead.
If node2 is None, compare node1 with working directory.
"""
def mfmatches(ctx):
mf = ctx.manifest().copy()
if match.always():
return mf
for fn in mf.keys():
if not match(fn):
del mf[fn]
return mf
if isinstance(node1, context.changectx):
ctx1 = node1
else:
ctx1 = self[node1]
if isinstance(node2, context.changectx):
ctx2 = node2
else:
ctx2 = self[node2]
working = ctx2.rev() is None
parentworking = working and ctx1 == self['.']
match = match or matchmod.always(self.root, self.getcwd())
listignored, listclean, listunknown = ignored, clean, unknown
# load earliest manifest first for caching reasons
if not working and ctx2.rev() < ctx1.rev():
ctx2.manifest()
if not parentworking:
def bad(f, msg):
# 'f' may be a directory pattern from 'match.files()',
# so 'f not in ctx1' is not enough
if f not in ctx1 and f not in ctx1.dirs():
self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
match.bad = bad
if working: # we need to scan the working dir
subrepos = []
if '.hgsub' in self.dirstate:
subrepos = sorted(ctx2.substate)
s = self.dirstate.status(match, subrepos, listignored,
listclean, listunknown)
cmp, modified, added, removed, deleted, unknown, ignored, clean = s
# check for any possibly clean files
if parentworking and cmp:
fixup = []
# do a full compare of any files that might have changed
for f in sorted(cmp):
if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
or ctx1[f].cmp(ctx2[f])):
modified.append(f)
else:
fixup.append(f)
# update dirstate for files that are actually clean
if fixup:
if listclean:
clean += fixup
try:
# updating the dirstate is optional
# so we don't wait on the lock
wlock = self.wlock(False)
try:
for f in fixup:
self.dirstate.normal(f)
finally:
wlock.release()
except error.LockError:
pass
if not parentworking:
mf1 = mfmatches(ctx1)
if working:
# we are comparing working dir against non-parent
# generate a pseudo-manifest for the working dir
mf2 = mfmatches(self['.'])
for f in cmp + modified + added:
mf2[f] = None
mf2.set(f, ctx2.flags(f))
for f in removed:
if f in mf2:
del mf2[f]
else:
# we are comparing two revisions
deleted, unknown, ignored = [], [], []
mf2 = mfmatches(ctx2)
modified, added, clean = [], [], []
withflags = mf1.withflags() | mf2.withflags()
for fn, mf2node in mf2.iteritems():
if fn in mf1:
if (fn not in deleted and
((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
(mf1[fn] != mf2node and
(mf2node or ctx1[fn].cmp(ctx2[fn]))))):
modified.append(fn)
elif listclean:
clean.append(fn)
del mf1[fn]
elif fn not in deleted:
added.append(fn)
removed = mf1.keys()
if working and modified and not self.dirstate._checklink:
# Symlink placeholders may get non-symlink-like contents
# via user error or dereferencing by NFS or Samba servers,
# so we filter out any placeholders that don't look like a
# symlink
sane = []
for f in modified:
if ctx2.flags(f) == 'l':
d = ctx2[f].data()
if len(d) >= 1024 or '\n' in d or util.binary(d):
self.ui.debug('ignoring suspect symlink placeholder'
' "%s"\n' % f)
continue
sane.append(f)
modified = sane
r = modified, added, removed, deleted, unknown, ignored, clean
if listsubrepos:
for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
if working:
rev2 = None
else:
rev2 = ctx2.substate[subpath][1]
try:
submatch = matchmod.narrowmatcher(subpath, match)
s = sub.status(rev2, match=submatch, ignored=listignored,
clean=listclean, unknown=listunknown,
listsubrepos=True)
for rfiles, sfiles in zip(r, s):
rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
except error.LookupError:
self.ui.status(_("skipping missing subrepository: %s\n")
% subpath)
for l in r:
l.sort()
return r
def heads(self, start=None):
heads = self.changelog.heads(start)
# sort the output in rev descending order
return sorted(heads, key=self.changelog.rev, reverse=True)
def branchheads(self, branch=None, start=None, closed=False):
'''return a (possibly filtered) list of heads for the given branch
Heads are returned in topological order, from newest to oldest.
If branch is None, use the dirstate branch.
If start is not None, return only heads reachable from start.
If closed is True, return heads that are marked as closed as well.
'''
if branch is None:
branch = self[None].branch()
branches = self.branchmap()
if branch not in branches:
return []
# the cache returns heads ordered lowest to highest
bheads = list(reversed(branches[branch]))
if start is not None:
# filter out the heads that cannot be reached from startrev
fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
bheads = [h for h in bheads if h in fbheads]
if not closed:
bheads = [h for h in bheads if not self[h].closesbranch()]
return bheads
def branches(self, nodes):
if not nodes:
nodes = [self.changelog.tip()]
b = []
for n in nodes:
t = n
while True:
p = self.changelog.parents(n)
if p[1] != nullid or p[0] == nullid:
b.append((t, n, p[0], p[1]))
break
n = p[0]
return b
def between(self, pairs):
r = []
for top, bottom in pairs:
n, l, i = top, [], 0
f = 1
while n != bottom and n != nullid:
p = self.changelog.parents(n)[0]
if i == f:
l.append(n)
f = f * 2
n = p
i += 1
r.append(l)
return r
def pull(self, remote, heads=None, force=False):
# don't open transaction for nothing or you break future useful
# rollback call
tr = None
trname = 'pull\n' + util.hidepassword(remote.url())
lock = self.lock()
try:
tmp = discovery.findcommonincoming(self, remote, heads=heads,
force=force)
common, fetch, rheads = tmp
if not fetch:
self.ui.status(_("no changes found\n"))
added = []
result = 0
else:
tr = self.transaction(trname)
if heads is None and list(common) == [nullid]:
self.ui.status(_("requesting all changes\n"))
elif heads is None and remote.capable('changegroupsubset'):
# issue1320, avoid a race if remote changed after discovery
heads = rheads
if remote.capable('getbundle'):
# TODO: get bundlecaps from remote
cg = remote.getbundle('pull', common=common,
heads=heads or rheads)
elif heads is None:
cg = remote.changegroup(fetch, 'pull')
elif not remote.capable('changegroupsubset'):
raise util.Abort(_("partial pull cannot be done because "
"other repository doesn't support "
"changegroupsubset."))
else:
cg = remote.changegroupsubset(fetch, heads, 'pull')
# we use unfiltered changelog here because hidden revision must
# be taken in account for phase synchronization. They may
# becomes public and becomes visible again.
cl = self.unfiltered().changelog
clstart = len(cl)
result = self.addchangegroup(cg, 'pull', remote.url())
clend = len(cl)
added = [cl.node(r) for r in xrange(clstart, clend)]
# compute target subset
if heads is None:
# We pulled every thing possible
# sync on everything common
subset = common + added
else:
# We pulled a specific subset
# sync on this subset
subset = heads
# Get remote phases data from remote
remotephases = remote.listkeys('phases')
publishing = bool(remotephases.get('publishing', False))
if remotephases and not publishing:
# remote is new and unpublishing
pheads, _dr = phases.analyzeremotephases(self, subset,
remotephases)
phases.advanceboundary(self, phases.public, pheads)
phases.advanceboundary(self, phases.draft, subset)
else:
# Remote is old or publishing all common changesets
# should be seen as public
phases.advanceboundary(self, phases.public, subset)
def gettransaction():
if tr is None:
return self.transaction(trname)
return tr
obstr = obsolete.syncpull(self, remote, gettransaction)
if obstr is not None:
tr = obstr
if tr is not None:
tr.close()
finally:
if tr is not None:
tr.release()
lock.release()
return result
def checkpush(self, force, revs):
"""Extensions can override this function if additional checks have
to be performed before pushing, or call it if they override push
command.
"""
pass
def push(self, remote, force=False, revs=None, newbranch=False):
'''Push outgoing changesets (limited by revs) from the current
repository to remote. Return an integer:
- None means nothing to push
- 0 means HTTP error
- 1 means we pushed and remote head count is unchanged *or*
we have outgoing changesets but refused to push
- other values as described by addchangegroup()
'''
# there are two ways to push to remote repo:
#
# addchangegroup assumes local user can lock remote
# repo (local filesystem, old ssh servers).
#
# unbundle assumes local user cannot lock remote repo (new ssh
# servers, http servers).
if not remote.canpush():
raise util.Abort(_("destination does not support push"))
unfi = self.unfiltered()
def localphasemove(nodes, phase=phases.public):
"""move <nodes> to <phase> in the local source repo"""
if locallock is not None:
phases.advanceboundary(self, phase, nodes)
else:
# repo is not locked, do not change any phases!
# Informs the user that phases should have been moved when
# applicable.
actualmoves = [n for n in nodes if phase < self[n].phase()]
phasestr = phases.phasenames[phase]
if actualmoves:
self.ui.status(_('cannot lock source repo, skipping local'
' %s phase update\n') % phasestr)
# get local lock as we might write phase data
locallock = None
try:
locallock = self.lock()
except IOError, err:
if err.errno != errno.EACCES:
raise
# source repo cannot be locked.
# We do not abort the push, but just disable the local phase
# synchronisation.
msg = 'cannot lock source repository: %s\n' % err
self.ui.debug(msg)
try:
self.checkpush(force, revs)
lock = None
unbundle = remote.capable('unbundle')
if not unbundle:
lock = remote.lock()
try:
# discovery
fci = discovery.findcommonincoming
commoninc = fci(unfi, remote, force=force)
common, inc, remoteheads = commoninc
fco = discovery.findcommonoutgoing
outgoing = fco(unfi, remote, onlyheads=revs,
commoninc=commoninc, force=force)
if not outgoing.missing:
# nothing to push
scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
ret = None
else:
# something to push
if not force:
# if self.obsstore == False --> no obsolete
# then, save the iteration
if unfi.obsstore:
# this message are here for 80 char limit reason
mso = _("push includes obsolete changeset: %s!")
mst = "push includes %s changeset: %s!"
# plain versions for i18n tool to detect them
_("push includes unstable changeset: %s!")
_("push includes bumped changeset: %s!")
_("push includes divergent changeset: %s!")
# If we are to push if there is at least one
# obsolete or unstable changeset in missing, at
# least one of the missinghead will be obsolete or
# unstable. So checking heads only is ok
for node in outgoing.missingheads:
ctx = unfi[node]
if ctx.obsolete():
raise util.Abort(mso % ctx)
elif ctx.troubled():
raise util.Abort(_(mst)
% (ctx.troubles()[0],
ctx))
discovery.checkheads(unfi, remote, outgoing,
remoteheads, newbranch,
bool(inc))
# TODO: get bundlecaps from remote
bundlecaps = None
# create a changegroup from local
if revs is None and not outgoing.excluded:
# push everything,
# use the fast path, no race possible on push
bundler = changegroup.bundle10(self, bundlecaps)
cg = self._changegroupsubset(outgoing,
bundler,
'push',
fastpath=True)
else:
cg = self.getlocalbundle('push', outgoing, bundlecaps)
# apply changegroup to remote
if unbundle:
# local repo finds heads on server, finds out what
# revs it must push. once revs transferred, if server
# finds it has different heads (someone else won
# commit/push race), server aborts.
if force:
remoteheads = ['force']
# ssh: return remote's addchangegroup()
# http: return remote's addchangegroup() or 0 for error
ret = remote.unbundle(cg, remoteheads, 'push')
else:
# we return an integer indicating remote head count
# change
ret = remote.addchangegroup(cg, 'push', self.url())
if ret:
# push succeed, synchronize target of the push
cheads = outgoing.missingheads
elif revs is None:
# All out push fails. synchronize all common
cheads = outgoing.commonheads
else:
# I want cheads = heads(::missingheads and ::commonheads)
# (missingheads is revs with secret changeset filtered out)
#
# This can be expressed as:
# cheads = ( (missingheads and ::commonheads)
# + (commonheads and ::missingheads))"
# )
#
# while trying to push we already computed the following:
# common = (::commonheads)
# missing = ((commonheads::missingheads) - commonheads)
#
# We can pick:
# * missingheads part of common (::commonheads)
common = set(outgoing.common)
cheads = [node for node in revs if node in common]
# and
# * commonheads parents on missing
revset = unfi.set('%ln and parents(roots(%ln))',
outgoing.commonheads,
outgoing.missing)
cheads.extend(c.node() for c in revset)
# even when we don't push, exchanging phase data is useful
remotephases = remote.listkeys('phases')
if (self.ui.configbool('ui', '_usedassubrepo', False)
and remotephases # server supports phases
and ret is None # nothing was pushed
and remotephases.get('publishing', False)):
# When:
# - this is a subrepo push
# - and remote support phase
# - and no changeset was pushed
# - and remote is publishing
# We may be in issue 3871 case!
# We drop the possible phase synchronisation done by
# courtesy to publish changesets possibly locally draft
# on the remote.
remotephases = {'publishing': 'True'}
if not remotephases: # old server or public only repo
localphasemove(cheads)
# don't push any phase data as there is nothing to push
else:
ana = phases.analyzeremotephases(self, cheads, remotephases)
pheads, droots = ana
### Apply remote phase on local
if remotephases.get('publishing', False):
localphasemove(cheads)
else: # publish = False
localphasemove(pheads)
localphasemove(cheads, phases.draft)
### Apply local phase on remote
# Get the list of all revs draft on remote by public here.
# XXX Beware that revset break if droots is not strictly
# XXX root we may want to ensure it is but it is costly
outdated = unfi.set('heads((%ln::%ln) and public())',
droots, cheads)
for newremotehead in outdated:
r = remote.pushkey('phases',
newremotehead.hex(),
str(phases.draft),
str(phases.public))
if not r:
self.ui.warn(_('updating %s to public failed!\n')
% newremotehead)
self.ui.debug('try to push obsolete markers to remote\n')
obsolete.syncpush(self, remote)
finally:
if lock is not None:
lock.release()
finally:
if locallock is not None:
locallock.release()
self.ui.debug("checking for updated bookmarks\n")
rb = remote.listkeys('bookmarks')
for k in rb.keys():
if k in unfi._bookmarks:
nr, nl = rb[k], hex(self._bookmarks[k])
if nr in unfi:
cr = unfi[nr]
cl = unfi[nl]
if bookmarks.validdest(unfi, cr, cl):
r = remote.pushkey('bookmarks', k, nr, nl)
if r:
self.ui.status(_("updating bookmark %s\n") % k)
else:
self.ui.warn(_('updating bookmark %s'
' failed!\n') % k)
return ret
def changegroupinfo(self, nodes, source):
if self.ui.verbose or source == 'bundle':
self.ui.status(_("%d changesets found\n") % len(nodes))
if self.ui.debugflag:
self.ui.debug("list of changesets:\n")
for node in nodes:
self.ui.debug("%s\n" % hex(node))
def changegroupsubset(self, bases, heads, source):
"""Compute a changegroup consisting of all the nodes that are
descendants of any of the bases and ancestors of any of the heads.
Return a chunkbuffer object whose read() method will return
successive changegroup chunks.
It is fairly complex as determining which filenodes and which
manifest nodes need to be included for the changeset to be complete
is non-trivial.
Another wrinkle is doing the reverse, figuring out which changeset in
the changegroup a particular filenode or manifestnode belongs to.
"""
cl = self.changelog
if not bases:
bases = [nullid]
# TODO: remove call to nodesbetween.
csets, bases, heads = cl.nodesbetween(bases, heads)
bases = [p for n in bases for p in cl.parents(n) if p != nullid]
outgoing = discovery.outgoing(cl, bases, heads)
bundler = changegroup.bundle10(self)
return self._changegroupsubset(outgoing, bundler, source)
def getlocalbundle(self, source, outgoing, bundlecaps=None):
"""Like getbundle, but taking a discovery.outgoing as an argument.
This is only implemented for local repos and reuses potentially
precomputed sets in outgoing."""
if not outgoing.missing:
return None
bundler = changegroup.bundle10(self, bundlecaps)
return self._changegroupsubset(outgoing, bundler, source)
def getbundle(self, source, heads=None, common=None, bundlecaps=None):
"""Like changegroupsubset, but returns the set difference between the
ancestors of heads and the ancestors common.
If heads is None, use the local heads. If common is None, use [nullid].
The nodes in common might not all be known locally due to the way the
current discovery protocol works.
"""
cl = self.changelog
if common:
hasnode = cl.hasnode
common = [n for n in common if hasnode(n)]
else:
common = [nullid]
if not heads:
heads = cl.heads()
return self.getlocalbundle(source,
discovery.outgoing(cl, common, heads),
bundlecaps=bundlecaps)
@unfilteredmethod
def _changegroupsubset(self, outgoing, bundler, source,
fastpath=False):
commonrevs = outgoing.common
csets = outgoing.missing
heads = outgoing.missingheads
# We go through the fast path if we get told to, or if all (unfiltered
# heads have been requested (since we then know there all linkrevs will
# be pulled by the client).
heads.sort()
fastpathlinkrev = fastpath or (
self.filtername is None and heads == sorted(self.heads()))
self.hook('preoutgoing', throw=True, source=source)
self.changegroupinfo(csets, source)
gengroup = bundler.generate(commonrevs, csets, fastpathlinkrev, source)
return changegroup.unbundle10(util.chunkbuffer(gengroup), 'UN')
def changegroup(self, basenodes, source):
# to avoid a race we use changegroupsubset() (issue1320)
return self.changegroupsubset(basenodes, self.heads(), source)
@unfilteredmethod
def addchangegroup(self, source, srctype, url, emptyok=False):
"""Add the changegroup returned by source.read() to this repo.
srctype is a string like 'push', 'pull', or 'unbundle'. url is
the URL of the repo where this changegroup is coming from.
Return an integer summarizing the change to this repo:
- nothing changed or no source: 0
- more heads than before: 1+added heads (2..n)
- fewer heads than before: -1-removed heads (-2..-n)
- number of heads stays the same: 1
"""
def csmap(x):
self.ui.debug("add changeset %s\n" % short(x))
return len(cl)
def revmap(x):
return cl.rev(x)
if not source:
return 0
self.hook('prechangegroup', throw=True, source=srctype, url=url)
changesets = files = revisions = 0
efiles = set()
# write changelog data to temp files so concurrent readers will not see
# inconsistent view
cl = self.changelog
cl.delayupdate()
oldheads = cl.heads()
tr = self.transaction("\n".join([srctype, util.hidepassword(url)]))
try:
trp = weakref.proxy(tr)
# pull off the changeset group
self.ui.status(_("adding changesets\n"))
clstart = len(cl)
class prog(object):
step = _('changesets')
count = 1
ui = self.ui
total = None
def __call__(self):
self.ui.progress(self.step, self.count, unit=_('chunks'),
total=self.total)
self.count += 1
pr = prog()
source.callback = pr
source.changelogheader()
srccontent = cl.addgroup(source, csmap, trp)
if not (srccontent or emptyok):
raise util.Abort(_("received changelog group is empty"))
clend = len(cl)
changesets = clend - clstart
for c in xrange(clstart, clend):
efiles.update(self[c].files())
efiles = len(efiles)
self.ui.progress(_('changesets'), None)
# pull off the manifest group
self.ui.status(_("adding manifests\n"))
pr.step = _('manifests')
pr.count = 1
pr.total = changesets # manifests <= changesets
# no need to check for empty manifest group here:
# if the result of the merge of 1 and 2 is the same in 3 and 4,
# no new manifest will be created and the manifest group will
# be empty during the pull
source.manifestheader()
self.manifest.addgroup(source, revmap, trp)
self.ui.progress(_('manifests'), None)
needfiles = {}
if self.ui.configbool('server', 'validate', default=False):
# validate incoming csets have their manifests
for cset in xrange(clstart, clend):
mfest = self.changelog.read(self.changelog.node(cset))[0]
mfest = self.manifest.readdelta(mfest)
# store file nodes we must see
for f, n in mfest.iteritems():
needfiles.setdefault(f, set()).add(n)
# process the files
self.ui.status(_("adding file changes\n"))
pr.step = _('files')
pr.count = 1
pr.total = efiles
source.callback = None
newrevs, newfiles = self.addchangegroupfiles(source, revmap, trp,
pr, needfiles)
revisions += newrevs
files += newfiles
dh = 0
if oldheads:
heads = cl.heads()
dh = len(heads) - len(oldheads)
for h in heads:
if h not in oldheads and self[h].closesbranch():
dh -= 1
htext = ""
if dh:
htext = _(" (%+d heads)") % dh
self.ui.status(_("added %d changesets"
" with %d changes to %d files%s\n")
% (changesets, revisions, files, htext))
self.invalidatevolatilesets()
if changesets > 0:
p = lambda: cl.writepending() and self.root or ""
self.hook('pretxnchangegroup', throw=True,
node=hex(cl.node(clstart)), source=srctype,
url=url, pending=p)
added = [cl.node(r) for r in xrange(clstart, clend)]
publishing = self.ui.configbool('phases', 'publish', True)
if srctype == 'push':
# Old server can not push the boundary themself.
# New server won't push the boundary if changeset already
# existed locally as secrete
#
# We should not use added here but the list of all change in
# the bundle
if publishing:
phases.advanceboundary(self, phases.public, srccontent)
else:
phases.advanceboundary(self, phases.draft, srccontent)
phases.retractboundary(self, phases.draft, added)
elif srctype != 'strip':
# publishing only alter behavior during push
#
# strip should not touch boundary at all
phases.retractboundary(self, phases.draft, added)
# make changelog see real files again
cl.finalize(trp)
tr.close()
if changesets > 0:
if srctype != 'strip':
# During strip, branchcache is invalid but coming call to
# `destroyed` will repair it.
# In other case we can safely update cache on disk.
branchmap.updatecache(self.filtered('served'))
def runhooks():
# forcefully update the on-disk branch cache
self.ui.debug("updating the branch cache\n")
self.hook("changegroup", node=hex(cl.node(clstart)),
source=srctype, url=url)
for n in added:
self.hook("incoming", node=hex(n), source=srctype,
url=url)
newheads = [h for h in self.heads() if h not in oldheads]
self.ui.log("incoming",
"%s incoming changes - new heads: %s\n",
len(added),
', '.join([hex(c[:6]) for c in newheads]))
self._afterlock(runhooks)
finally:
tr.release()
# never return 0 here:
if dh < 0:
return dh - 1
else:
return dh + 1
def addchangegroupfiles(self, source, revmap, trp, pr, needfiles):
revisions = 0
files = 0
while True:
chunkdata = source.filelogheader()
if not chunkdata:
break
f = chunkdata["filename"]
self.ui.debug("adding %s revisions\n" % f)
pr()
fl = self.file(f)
o = len(fl)
if not fl.addgroup(source, revmap, trp):
raise util.Abort(_("received file revlog group is empty"))
revisions += len(fl) - o
files += 1
if f in needfiles:
needs = needfiles[f]
for new in xrange(o, len(fl)):
n = fl.node(new)
if n in needs:
needs.remove(n)
else:
raise util.Abort(
_("received spurious file revlog entry"))
if not needs:
del needfiles[f]
self.ui.progress(_('files'), None)
for f, needs in needfiles.iteritems():
fl = self.file(f)
for n in needs:
try:
fl.rev(n)
except error.LookupError:
raise util.Abort(
_('missing file data for %s:%s - run hg verify') %
(f, hex(n)))
return revisions, files
def stream_in(self, remote, requirements):
lock = self.lock()
try:
# Save remote branchmap. We will use it later
# to speed up branchcache creation
rbranchmap = None
if remote.capable("branchmap"):
rbranchmap = remote.branchmap()
fp = remote.stream_out()
l = fp.readline()
try:
resp = int(l)
except ValueError:
raise error.ResponseError(
_('unexpected response from remote server:'), l)
if resp == 1:
raise util.Abort(_('operation forbidden by server'))
elif resp == 2:
raise util.Abort(_('locking the remote repository failed'))
elif resp != 0:
raise util.Abort(_('the server sent an unknown error code'))
self.ui.status(_('streaming all changes\n'))
l = fp.readline()
try:
total_files, total_bytes = map(int, l.split(' ', 1))
except (ValueError, TypeError):
raise error.ResponseError(
_('unexpected response from remote server:'), l)
self.ui.status(_('%d files to transfer, %s of data\n') %
(total_files, util.bytecount(total_bytes)))
handled_bytes = 0
self.ui.progress(_('clone'), 0, total=total_bytes)
start = time.time()
for i in xrange(total_files):
# XXX doesn't support '\n' or '\r' in filenames
l = fp.readline()
try:
name, size = l.split('\0', 1)
size = int(size)
except (ValueError, TypeError):
raise error.ResponseError(
_('unexpected response from remote server:'), l)
if self.ui.debugflag:
self.ui.debug('adding %s (%s)\n' %
(name, util.bytecount(size)))
# for backwards compat, name was partially encoded
ofp = self.sopener(store.decodedir(name), 'w')
for chunk in util.filechunkiter(fp, limit=size):
handled_bytes += len(chunk)
self.ui.progress(_('clone'), handled_bytes,
total=total_bytes)
ofp.write(chunk)
ofp.close()
elapsed = time.time() - start
if elapsed <= 0:
elapsed = 0.001
self.ui.progress(_('clone'), None)
self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
(util.bytecount(total_bytes), elapsed,
util.bytecount(total_bytes / elapsed)))
# new requirements = old non-format requirements +
# new format-related
# requirements from the streamed-in repository
requirements.update(set(self.requirements) - self.supportedformats)
self._applyrequirements(requirements)
self._writerequirements()
if rbranchmap:
rbheads = []
for bheads in rbranchmap.itervalues():
rbheads.extend(bheads)
if rbheads:
rtiprev = max((int(self.changelog.rev(node))
for node in rbheads))
cache = branchmap.branchcache(rbranchmap,
self[rtiprev].node(),
rtiprev)
# Try to stick it as low as possible
# filter above served are unlikely to be fetch from a clone
for candidate in ('base', 'immutable', 'served'):
rview = self.filtered(candidate)
if cache.validfor(rview):
self._branchcaches[candidate] = cache
cache.write(rview)
break
self.invalidate()
return len(self.heads()) + 1
finally:
lock.release()
def clone(self, remote, heads=[], stream=False):
'''clone remote repository.
keyword arguments:
heads: list of revs to clone (forces use of pull)
stream: use streaming clone if possible'''
# now, all clients that can request uncompressed clones can
# read repo formats supported by all servers that can serve
# them.
# if revlog format changes, client will have to check version
# and format flags on "stream" capability, and use
# uncompressed only if compatible.
if not stream:
# if the server explicitly prefers to stream (for fast LANs)
stream = remote.capable('stream-preferred')
if stream and not heads:
# 'stream' means remote revlog format is revlogv1 only
if remote.capable('stream'):
return self.stream_in(remote, set(('revlogv1',)))
# otherwise, 'streamreqs' contains the remote revlog format
streamreqs = remote.capable('streamreqs')
if streamreqs:
streamreqs = set(streamreqs.split(','))
# if we support it, stream in and adjust our requirements
if not streamreqs - self.supportedformats:
return self.stream_in(remote, streamreqs)
return self.pull(remote, heads)
def pushkey(self, namespace, key, old, new):
self.hook('prepushkey', throw=True, namespace=namespace, key=key,
old=old, new=new)
self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
ret = pushkey.push(self, namespace, key, old, new)
self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
ret=ret)
return ret
def listkeys(self, namespace):
self.hook('prelistkeys', throw=True, namespace=namespace)
self.ui.debug('listing keys for "%s"\n' % namespace)
values = pushkey.list(self, namespace)
self.hook('listkeys', namespace=namespace, values=values)
return values
def debugwireargs(self, one, two, three=None, four=None, five=None):
'''used to test argument passing over the wire'''
return "%s %s %s %s %s" % (one, two, three, four, five)
def savecommitmessage(self, text):
fp = self.opener('last-message.txt', 'wb')
try:
fp.write(text)
finally:
fp.close()
return self.pathto(fp.name[len(self.root) + 1:])
# used to avoid circular references so destructors work
def aftertrans(files):
renamefiles = [tuple(t) for t in files]
def a():
for vfs, src, dest in renamefiles:
try:
vfs.rename(src, dest)
except OSError: # journal file does not yet exist
pass
return a
def undoname(fn):
base, name = os.path.split(fn)
assert name.startswith('journal')
return os.path.join(base, name.replace('journal', 'undo', 1))
def instance(ui, path, create):
return localrepository(ui, util.urllocalpath(path), create)
def islocal(path):
return True<|fim▁end|> | return self.local() # so statichttprepo's override of local() works
|
<|file_name|>Sergeant.java<|end_file_name|><|fim▁begin|><|fim▁hole|>package com.iluwatar;
public class Sergeant extends Unit {
public Sergeant(Unit ... children) {
super(children);
}
@Override
public void accept(UnitVisitor visitor) {
visitor.visitSergeant(this);
super.accept(visitor);
}
@Override
public String toString() {
return "sergeant";
}
}<|fim▁end|> | |
<|file_name|>GA_8Queens.py<|end_file_name|><|fim▁begin|>'''
Solution for Eight Queens Problem Using the Genetic Algorithms
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Copyright (C) 2012 Enes Ates
Authors: Enes Ates - [email protected]
'''
import random
from random import randint
class Chromosome():
def __init__(self):
self.queens = []
self.fitness = 0
class GA_8Queens:
def __init__(self):
self.population = 10
self.chromosomes = []
self.prob_crossover = 0.9 # crossover possibility
self.prob_mutation = 0.1 # mutation possibility
self.fitness = 28 # successor value
self.generation = 0
self.max_generation = 50
self.optimum = Chromosome()
self.crossover_method = "two-point"
def run(self):
self.create_initial_chromosomes()
while not self.success():
self.next_gen()
self.generation_info(self.chromosomes)
print "Result", self.optimum.queens, self.optimum.fitness
def success(self):
return self.generation >= self.max_generation or \
self.fitness == self.optimum.fitness
def next_gen(self):
next_generation = []
self.generation += 1
success = False
next_generation.append(self.optimum)
while len(next_generation) < self.population and success == False:
success = self.crossover(self.chromosomes, next_generation)
self.chromosomes = next_generation
def generation_info(self, chromosomes):
i=0
print "\n\n"
print self.generation, ". generation"
print "-----------------------------\n"
for chrom in chromosomes:
i += 1
print i, ". chromosome: ", chrom.queens, ", fitness: ", chrom.fitness
if (chrom.fitness > self.optimum.fitness):
self.optimum = chrom
print "Optimum:", self.optimum.queens, self.optimum.fitness
def create_initial_chromosomes(self):
for i in range(0, self.population):
chromosome = Chromosome()
chromosome.queens = range(0, 8)
random.shuffle(chromosome.queens)
chromosome.fitness = self.calc_fitness(chromosome.queens)
self.chromosomes.append(chromosome)
self.generation += 1
self.generation_info(self.chromosomes)
def calc_fitness(self, queens):
fitness = self.fitness
for i in range(0, 8):
for j in range(i+1, 8):
if((j-i) == abs(queens[i] - queens[j])):
fitness -= 1
return fitness
def crossover(self, chromosomes, next_generation):
first_chrom = self.choose_chromosome(chromosomes)
chromosomes.remove(first_chrom)
second_chrom = self.choose_chromosome(chromosomes)
chromosomes.append(first_chrom)
if random.random() < self.prob_crossover:
child_1 = Chromosome()
child_2 = Chromosome()
if self.crossover_method == "one-point":
child_1.queens = first_chrom.queens[0:5] + second_chrom.queens[5:8]
child_2.queens = second_chrom.queens[0:5] + first_chrom.queens[5:8]
elif self.crossover_method == "two-point":
child_1.queens = first_chrom.queens[0:3] + second_chrom.queens[3:6] + first_chrom.queens[6:8]
child_2.queens = second_chrom.queens[0:3] + first_chrom.queens[3:6] + second_chrom.queens[6:8]
elif self.crossover_method == "random-point":
for i in range(0,8):
first, second = random.sample([first_chrom.queens[i], second_chrom.queens[i]], 2)
child_1.queens.append(first), child_2.queens.append(second)
child_1.fitness = self.calc_fitness(child_1.queens)
child_2.fitness = self.calc_fitness(child_2.queens)<|fim▁hole|>
if child_1.fitness == self.fitness or child_2.fitness == self.fitness:
success = True
print "Crossover result:", first_chrom.queens, "with", second_chrom.queens, "-->", child_1.queens, "fitness:", child_1.fitness
success = self.mutation(child_1, next_generation)
print "Crossover result:", first_chrom.queens, "with", second_chrom.queens, "-->", child_2.queens, "fitness:", child_2.fitness
success = self.mutation(child_2, next_generation)
else:
success = self.mutation(first_chrom, next_generation)
success = self.mutation(second_chrom, next_generation)
return success
def mutation(self, chromosome, next_generation):
for i in range(0,8):
if random.random() < self.prob_mutation:
chromosome.queens[i] = random.randint(0, 7)
chromosome.fitness = self.calc_fitness(chromosome.queens)
print "Mutation result:", chromosome.queens, "fitness:", chromosome.fitness
next_generation.append(chromosome)
if chromosome.fitness == self.fitness:
return True
else:
return False
def choose_chromosome(self, chromosomes):
total_fitness = 0
for chrom in chromosomes:
total_fitness += chrom.fitness
rand = randint(1, total_fitness)
roulette = 0
for chrom in self.chromosomes:
roulette += chrom.fitness
if rand <= roulette:
return chrom
if __name__ == "__main__":
ga8 = GA_8Queens()
ga8.run()<|fim▁end|> | |
<|file_name|>hash.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use digest::{Digest, FixedOutput};
use sha2::Sha256;
use std::error::Error;
use std::fmt;
use std::io::{self, Write};
use hex;
const FINGERPRINT_SIZE: usize = 32;
#[derive(Clone, Copy, Eq, Hash, PartialEq)]
pub struct Fingerprint(pub [u8; FINGERPRINT_SIZE]);
impl Fingerprint {
pub fn from_bytes_unsafe(bytes: &[u8]) -> Fingerprint {
if bytes.len() != FINGERPRINT_SIZE {
panic!(
"Input value was not a fingerprint; had length: {}",
bytes.len()
);
}
let mut fingerprint = [0; FINGERPRINT_SIZE];
fingerprint.clone_from_slice(&bytes[0..FINGERPRINT_SIZE]);
Fingerprint(fingerprint)
}
pub fn from_hex_string(hex_string: &str) -> Result<Fingerprint, String> {
<[u8; FINGERPRINT_SIZE] as hex::FromHex>::from_hex(hex_string)
.map(|v| Fingerprint(v))
.map_err(|e| e.description().to_string())
}
pub fn as_bytes(&self) -> &[u8; FINGERPRINT_SIZE] {
&self.0
}
pub fn to_hex(&self) -> String {
let mut s = String::new();
for &byte in self.0.iter() {
fmt::Write::write_fmt(&mut s, format_args!("{:02x}", byte)).unwrap();
}
s
}
}
impl fmt::Display for Fingerprint {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.to_hex())
}
}
impl fmt::Debug for Fingerprint {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Fingerprint<{}>", self.to_hex())
}
}
///
/// A Write instance that fingerprints all data that passes through it.
///
pub struct WriterHasher<W: Write> {
hasher: Sha256,
inner: W,
}
impl<W: Write> WriterHasher<W> {
pub fn new(inner: W) -> WriterHasher<W> {
WriterHasher {
hasher: Sha256::default(),
inner: inner,
}
}
///
/// Returns the result of fingerprinting this stream, and Drops the stream.
///
pub fn finish(self) -> Fingerprint {
Fingerprint::from_bytes_unsafe(&self.hasher.fixed_result())
}
}
impl<W: Write> Write for WriterHasher<W> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
let written = self.inner.write(buf)?;
// Hash the bytes that were successfully written.
self.hasher.input(&buf[0..written]);
Ok(written)
}
fn flush(&mut self) -> io::Result<()> {
self.inner.flush()
}
}
#[cfg(test)]
mod fingerprint_tests {
use super::Fingerprint;
#[test]
fn from_bytes_unsafe() {
assert_eq!(
Fingerprint::from_bytes_unsafe(&vec![
0xab,
0xab,
0xab,
0xab,
0xab,<|fim▁hole|> 0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
0xab,
]),
Fingerprint([0xab; 32])
);
}
#[test]
fn from_hex_string() {
assert_eq!(
Fingerprint::from_hex_string(
"0123456789abcdefFEDCBA98765432100000000000000000ffFFfFfFFfFfFFff",
).unwrap(),
Fingerprint(
[
0x01,
0x23,
0x45,
0x67,
0x89,
0xab,
0xcd,
0xef,
0xfe,
0xdc,
0xba,
0x98,
0x76,
0x54,
0x32,
0x10,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0xff,
0xff,
0xff,
0xff,
0xff,
0xff,
0xff,
0xff,
],
)
)
}
#[test]
fn from_hex_string_not_long_enough() {
Fingerprint::from_hex_string("abcd").expect_err("Want err");
}
#[test]
fn from_hex_string_too_long() {
Fingerprint::from_hex_string(
"0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0",
).expect_err("Want err");
}
#[test]
fn from_hex_string_invalid_chars() {
Fingerprint::from_hex_string(
"Q123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF",
).expect_err("Want err");
}
#[test]
fn to_hex() {
assert_eq!(
Fingerprint(
[
0x01,
0x23,
0x45,
0x67,
0x89,
0xab,
0xcd,
0xef,
0xfe,
0xdc,
0xba,
0x98,
0x76,
0x54,
0x32,
0x10,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0xff,
0xff,
0xff,
0xff,
0xff,
0xff,
0xff,
0xff,
],
).to_hex(),
"0123456789abcdeffedcba98765432100000000000000000ffffffffffffffff".to_lowercase()
)
}
#[test]
fn display() {
let hex = "0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF";
assert_eq!(
Fingerprint::from_hex_string(hex).unwrap().to_hex(),
hex.to_lowercase()
)
}
}<|fim▁end|> | 0xab,
0xab, |
<|file_name|>h8.py<|end_file_name|><|fim▁begin|>import envi.archs.h8.emu as h8_emu
import envi.archs.h8.regs as h8_regs
import vivisect.impemu.emulator as v_i_emulator<|fim▁hole|>class H8WorkspaceEmulator(v_i_emulator.WorkspaceEmulator, h8_emu.H8Emulator):
taintregs = [h8_regs.REG_ER0, h8_regs.REG_ER1, h8_regs.REG_ER2]
def __init__(self, vw, logwrite=False, logread=False):
h8_emu.H8Emulator.__init__(self)
v_i_emulator.WorkspaceEmulator.__init__(self, vw, logwrite=logwrite, logread=logread)<|fim▁end|> | |
<|file_name|>compose-environment-manager.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2015-2017 Red Hat, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
'use strict';
import {EnvironmentManager} from './environment-manager';
import {IEnvironmentManagerMachine} from './environment-manager-machine';
import {ComposeParser, IComposeRecipe} from './compose-parser';
/**
* This is the implementation of environment manager that handles the docker compose format.
*
* Format sample and specific description:
* <code>
* services:
* devmachine:
* image: codenvy/ubuntu_jdk8
* depends_on:
* - anotherMachine
* mem_limit: 2147483648
* anotherMachine:
* image: codenvy/ubuntu_jdk8
* depends_on:
* - thirdMachine
* mem_limit: 1073741824
* thirdMachine:
* image: codenvy/ubuntu_jdk8
* mem_limit: 512741824
* labels:
* com.example.description: "Accounting webapp"
* com.example.department: "Finance"
* com.example.label-with-empty-value: ""
* environment:
* SOME_ENV: development
* SHOW: 'true'
* SESSION_SECRET:
* </code>
*
*
* The recipe type is <code>compose</code>.
* Machines are described both in recipe and in machines attribute of the environment (machine configs).
* The machine configs contain memoryLimitBytes in attributes, servers and agent.
* Environment variables can be set only in recipe content.
*
* @author Ann Shumilova
*/
export class ComposeEnvironmentManager extends EnvironmentManager {
parser: ComposeParser;
constructor($log: ng.ILogService) {
super($log);
this.parser = new ComposeParser();
}
get editorMode(): string {
return 'text/x-yaml';
}
/**
* Parses recipe content
*
* @param content {string} recipe content
* @returns {IComposeRecipe} recipe object
*/
_parseRecipe(content: string): IComposeRecipe {
let recipe = null;
try {<|fim▁hole|> }
return recipe;
}
/**
* Dumps recipe object
*
* @param recipe {IComposeRecipe} recipe object
* @returns {string} recipe content
*/
_stringifyRecipe(recipe: IComposeRecipe): string {
let content = '';
try {
content = this.parser.dump(recipe);
} catch (e) {
this.$log.error(e);
}
return content;
}
/**
* Retrieves the list of machines.
*
* @param {che.IWorkspaceEnvironment} environment environment's configuration
* @param {any=} runtime runtime of active environment
* @returns {IEnvironmentManagerMachine[]} list of machines defined in environment
*/
getMachines(environment: che.IWorkspaceEnvironment, runtime?: any): IEnvironmentManagerMachine[] {
let recipe: any = null,
machines: IEnvironmentManagerMachine[] = super.getMachines(environment, runtime),
machineNames: string[] = [];
if (environment.recipe.content) {
recipe = this._parseRecipe(environment.recipe.content);
if (recipe) {
machineNames = Object.keys(recipe.services);
} else if (environment.machines) {
machineNames = Object.keys(environment.machines);
}
} else if (environment.recipe.location) {
machineNames = Object.keys(environment.machines);
}
machineNames.forEach((machineName: string) => {
let machine: IEnvironmentManagerMachine = machines.find((_machine: IEnvironmentManagerMachine) => {
return _machine.name === machineName;
});
if (!machine) {
machine = { name: machineName };
machines.push(machine);
}
machine.recipe = recipe ? recipe.services[machineName] : recipe;
if (environment.machines && environment.machines[machineName]) {
angular.merge(machine, environment.machines[machineName]);
}
// memory
let memoryLimitBytes = this.getMemoryLimit(machine);
if (memoryLimitBytes === -1 && recipe) {
this.setMemoryLimit(machine, recipe.services[machineName].mem_limit);
}
});
return machines;
}
/**
* Provides the environment configuration based on machines format.
*
* @param {che.IWorkspaceEnvironment} environment origin of the environment to be edited
* @param {IEnvironmentManagerMachine} machines the list of machines
* @returns {che.IWorkspaceEnvironment} environment's configuration
*/
getEnvironment(environment: che.IWorkspaceEnvironment, machines: IEnvironmentManagerMachine[]): che.IWorkspaceEnvironment {
let newEnvironment = super.getEnvironment(environment, machines);
if (newEnvironment.recipe.content) {
let recipe: IComposeRecipe = this._parseRecipe(newEnvironment.recipe.content);
if (recipe) {
machines.forEach((machine: IEnvironmentManagerMachine) => {
let machineName = machine.name;
if (!recipe.services[machineName]) {
return;
}
if (machine.recipe.environment && Object.keys(machine.recipe.environment).length) {
recipe.services[machineName].environment = angular.copy(machine.recipe.environment);
} else {
delete recipe.services[machineName].environment;
}
if (machine.recipe.image) {
recipe.services[machineName].image = machine.recipe.image;
}
});
try {
newEnvironment.recipe.content = this._stringifyRecipe(recipe);
} catch (e) {
this.$log.error('Cannot retrieve environment\'s recipe, error: ', e);
}
}
}
return newEnvironment;
}
/**
* Returns object which contains docker image or link to docker file and build context.
*
* @param {IEnvironmentManagerMachine} machine
* @returns {*}
*/
getSource(machine: IEnvironmentManagerMachine): any {
if (!machine.recipe) {
return null;
}
if (machine.recipe.image) {
return {image: machine.recipe.image};
} else if (machine.recipe.build) {
return machine.recipe.build;
}
}
/**
* Updates machine's image
*
* @param {IEnvironmentManagerMachine} machine
* @param {String} image
*/
setSource(machine: IEnvironmentManagerMachine, image: string) {
if (!machine.recipe) {
return;
}
machine.recipe.image = image;
}
/**
* Returns true if environment recipe content is present.
*
* @param {IEnvironmentManagerMachine} machine
* @returns {boolean}
*/
canEditEnvVariables(machine: IEnvironmentManagerMachine): boolean {
return !!machine.recipe;
}
/**
* Returns object with environment variables.
*
* @param {IEnvironmentManagerMachine} machine
* @returns {*}
*/
getEnvVariables(machine: IEnvironmentManagerMachine): any {
if (!machine.recipe) {
return null;
}
return machine.recipe.environment || {};
}
/**
* Updates machine with new environment variables.
*
* @param {IEnvironmentManagerMachine} machine
* @param {any} envVariables
*/
setEnvVariables(machine: IEnvironmentManagerMachine, envVariables: any): void {
if (!machine.recipe) {
return;
}
if (Object.keys(envVariables).length) {
machine.recipe.environment = angular.copy(envVariables);
} else {
delete machine.recipe.environment;
}
}
/**
* Renames machine.
*
* @param {che.IWorkspaceEnvironment} environment
* @param {string} oldName
* @param {string} newName
* @returns {che.IWorkspaceEnvironment} new environment
*/
renameMachine(environment: che.IWorkspaceEnvironment, oldName: string, newName: string): che.IWorkspaceEnvironment {
try {
let recipe: IComposeRecipe = this._parseRecipe(environment.recipe.content);
// fix relations to other machines in recipe
Object.keys(recipe.services).forEach((serviceName: string) => {
if (serviceName === oldName) {
return;
}
// fix 'depends_on'
let dependsOn = recipe.services[serviceName].depends_on || [],
index = dependsOn.indexOf(oldName);
if (index > -1) {
dependsOn.splice(index, 1);
dependsOn.push(newName);
}
// fix 'links'
let links = recipe.services[serviceName].links || [],
re = new RegExp('^' + oldName + '(?:$|:(.+))');
for (let i = 0; i < links.length; i++) {
if (re.test(links[i])) {
let match = links[i].match(re),
alias = match[1] || '',
newLink = alias ? newName + ':' + alias : newName;
links.splice(i, 1);
links.push(newLink);
break;
}
}
});
// rename machine in recipe
recipe.services[newName] = recipe.services[oldName];
delete recipe.services[oldName];
// try to update recipe
environment.recipe.content = this._stringifyRecipe(recipe);
// and then update config
environment.machines[newName] = environment.machines[oldName];
delete environment.machines[oldName];
} catch (e) {
this.$log.error('Cannot rename machine, error: ', e);
}
return environment;
}
/**
* Removes machine.
*
* @param {che.IWorkspaceEnvironment} environment
* @param {string} name name of machine
* @returns {che.IWorkspaceEnvironment} new environment
*/
deleteMachine(environment: che.IWorkspaceEnvironment, name: string): che.IWorkspaceEnvironment {
try {
let recipe: IComposeRecipe = this._parseRecipe(environment.recipe.content);
// fix relations to other machines in recipe
Object.keys(recipe.services).forEach((serviceName: string) => {
if (serviceName === name) {
return;
}
// fix 'depends_on'
let dependsOn = recipe.services[serviceName].depends_on || [],
index = dependsOn.indexOf(name);
if (index > -1) {
dependsOn.splice(index, 1);
if (dependsOn.length === 0) {
delete recipe.services[serviceName].depends_on;
}
}
// fix 'links'
let links = recipe.services[serviceName].links || [],
re = new RegExp('^' + name + '(?:$|:(.+))');
for (let i = 0; i < links.length; i++) {
if (re.test(links[i])) {
links.splice(i, 1);
break;
}
}
if (links.length === 0) {
delete recipe.services[serviceName].links;
}
});
// delete machine from recipe
delete recipe.services[name];
// try to update recipe
environment.recipe.content = this._stringifyRecipe(recipe);
// and then update config
delete environment.machines[name];
} catch (e) {
this.$log.error('Cannot delete machine, error: ', e);
}
return environment;
}
}<|fim▁end|> | recipe = this.parser.parse(content);
} catch (e) {
this.$log.error(e); |
<|file_name|>login.py<|end_file_name|><|fim▁begin|>from flask_login import LoginManager
<|fim▁hole|>
login_manager = LoginManager()
@login_manager.user_loader
def load_user(user_id):
return User.get(user_id=user_id)<|fim▁end|> | from server.users.models import User |
<|file_name|>test_product_except_self.py<|end_file_name|><|fim▁begin|>"""
https://leetcode.com/explore/interview/card/top-interview-questions-hard/116/array-and-strings/827/
"""
from unittest import TestCase
from kevin.leet.product_except_self import Solution, SolutionOptimized
class TestProductExceptSelf(TestCase):
def _base_test_product_except_self(self, nums, expected):
for sol_class in [Solution, SolutionOptimized]:
sol = sol_class()
actual = sol.product_except_self(nums)
assert expected == actual, (expected, actual)
def test_product_except_self_basic(self):<|fim▁hole|> nums = [1, 2, 3, 4]
expected = [24, 12, 8, 6]
self._base_test_product_except_self(nums, expected)<|fim▁end|> | |
<|file_name|>win_impl.rs<|end_file_name|><|fim▁begin|>use winapi;
use self::winapi::shared::windef::POINT;
use self::winapi::ctypes::c_int;
use self::winapi::um::winuser::*;
use crate::win::keycodes::*;
use crate::{Key, KeyboardControllable, MouseButton, MouseControllable};
use std::mem::*;
/// The main struct for handling the event emitting
#[derive(Default)]
pub struct Enigo;
fn mouse_event(flags: u32, data: u32, dx: i32, dy: i32) {
let mut input = INPUT {
type_: INPUT_MOUSE,
u: unsafe {
transmute(MOUSEINPUT {
dx,
dy,
mouseData: data,
dwFlags: flags,
time: 0,
dwExtraInfo: 0,
})
},
};
unsafe { SendInput(1, &mut input as LPINPUT, size_of::<INPUT>() as c_int) };
}
fn keybd_event(flags: u32, vk: u16, scan: u16) {
let mut input = INPUT {
type_: INPUT_KEYBOARD,
u: unsafe {
transmute_copy(&KEYBDINPUT {
wVk: vk,
wScan: scan,
dwFlags: flags,
time: 0,
dwExtraInfo: 0,
})
},
};
unsafe { SendInput(1, &mut input as LPINPUT, size_of::<INPUT>() as c_int) };
}
impl MouseControllable for Enigo {
fn mouse_move_to(&mut self, x: i32, y: i32) {
mouse_event(
MOUSEEVENTF_MOVE | MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_VIRTUALDESK,
0,
(x - unsafe { GetSystemMetrics(SM_XVIRTUALSCREEN) }) * 65535 / unsafe { GetSystemMetrics(SM_CXVIRTUALSCREEN) },
(y - unsafe { GetSystemMetrics(SM_YVIRTUALSCREEN) }) * 65535 / unsafe { GetSystemMetrics(SM_CYVIRTUALSCREEN) },
);
}
fn mouse_move_relative(&mut self, x: i32, y: i32) {
mouse_event(MOUSEEVENTF_MOVE, 0, x, y);
}
fn mouse_down(&mut self, button: MouseButton) {
mouse_event(
match button {
MouseButton::Left => MOUSEEVENTF_LEFTDOWN,
MouseButton::Middle => MOUSEEVENTF_MIDDLEDOWN,
MouseButton::Right => MOUSEEVENTF_RIGHTDOWN,
_ => unimplemented!(),
},
0,
0,
0,
);
}
fn mouse_up(&mut self, button: MouseButton) {
mouse_event(
match button {
MouseButton::Left => MOUSEEVENTF_LEFTUP,
MouseButton::Middle => MOUSEEVENTF_MIDDLEUP,
MouseButton::Right => MOUSEEVENTF_RIGHTUP,
_ => unimplemented!(),
},
0,
0,
0,
);
}
fn mouse_click(&mut self, button: MouseButton) {
self.mouse_down(button);
self.mouse_up(button);
}
fn mouse_scroll_x(&mut self, length: i32) {
mouse_event(MOUSEEVENTF_HWHEEL, unsafe { transmute(length * 120) }, 0, 0);
}
fn mouse_scroll_y(&mut self, length: i32) {
mouse_event(MOUSEEVENTF_WHEEL, unsafe { transmute(length * 120) }, 0, 0);
}
}
impl KeyboardControllable for Enigo {
fn key_sequence(&mut self, sequence: &str) {
let mut buffer = [0; 2];
for c in sequence.chars() {
// Windows uses uft-16 encoding. We need to check
// for variable length characters. As such some
// characters can be 32 bit long and those are
// encoded in such called hight and low surrogates
// each 16 bit wide that needs to be send after
// another to the SendInput function without
// being interrupted by "keyup"
let result = c.encode_utf16(&mut buffer);
if result.len() == 1 {
self.unicode_key_click(result[0]);
} else {
for utf16_surrogate in result {
self.unicode_key_down(utf16_surrogate.clone());
}
// do i need to produce a keyup?
// self.unicode_key_up(0);
}
}
}
fn key_click(&mut self, key: Key) {
let scancode = self.key_to_scancode(key);
use std::{thread, time};
keybd_event(KEYEVENTF_SCANCODE, 0, scancode);
thread::sleep(time::Duration::from_millis(20));
keybd_event(KEYEVENTF_KEYUP | KEYEVENTF_SCANCODE, 0, scancode);
}
fn key_down(&mut self, key: Key) {
keybd_event(KEYEVENTF_SCANCODE, 0, self.key_to_scancode(key));
}
fn key_up(&mut self, key: Key) {
keybd_event(
KEYEVENTF_KEYUP | KEYEVENTF_SCANCODE,
0,<|fim▁hole|> }
}
impl Enigo {
/// Gets the (width, height) of the main display in screen coordinates (pixels).
///
/// # Example
///
/// ```no_run
/// use enigo::*;
/// let mut size = Enigo::main_display_size();
/// ```
pub fn main_display_size() -> (usize, usize) {
let w = unsafe { GetSystemMetrics(SM_CXSCREEN) as usize };
let h = unsafe { GetSystemMetrics(SM_CYSCREEN) as usize };
(w, h)
}
/// Gets the location of mouse in screen coordinates (pixels).
///
/// # Example
///
/// ```no_run
/// use enigo::*;
/// let mut location = Enigo::mouse_location();
/// ```
pub fn mouse_location() -> (i32, i32) {
let mut point = POINT { x: 0, y: 0 };
let result = unsafe { GetCursorPos(&mut point) };
if result != 0 {
(point.x, point.y)
} else {
(0, 0)
}
}
fn unicode_key_click(&self, unicode_char: u16) {
use std::{thread, time};
self.unicode_key_down(unicode_char);
thread::sleep(time::Duration::from_millis(20));
self.unicode_key_up(unicode_char);
}
fn unicode_key_down(&self, unicode_char: u16) {
keybd_event(KEYEVENTF_UNICODE, 0, unicode_char);
}
fn unicode_key_up(&self, unicode_char: u16) {
keybd_event(KEYEVENTF_UNICODE | KEYEVENTF_KEYUP, 0, unicode_char);
}
fn key_to_keycode(&self, key: Key) -> u16 {
// do not use the codes from crate winapi they're
// wrongly typed with i32 instead of i16 use the
// ones provided by win/keycodes.rs that are prefixed
// with an 'E' infront of the original name
#[allow(deprecated)]
// I mean duh, we still need to support deprecated keys until they're removed
match key {
Key::Alt => EVK_MENU,
Key::Backspace => EVK_BACK,
Key::CapsLock => EVK_CAPITAL,
Key::Control => EVK_LCONTROL,
Key::Delete => EVK_DELETE,
Key::DownArrow => EVK_DOWN,
Key::End => EVK_END,
Key::Escape => EVK_ESCAPE,
Key::F1 => EVK_F1,
Key::F10 => EVK_F10,
Key::F11 => EVK_F11,
Key::F12 => EVK_F12,
Key::F2 => EVK_F2,
Key::F3 => EVK_F3,
Key::F4 => EVK_F4,
Key::F5 => EVK_F5,
Key::F6 => EVK_F6,
Key::F7 => EVK_F7,
Key::F8 => EVK_F8,
Key::F9 => EVK_F9,
Key::Home => EVK_HOME,
Key::LeftArrow => EVK_LEFT,
Key::Option => EVK_MENU,
Key::PageDown => EVK_NEXT,
Key::PageUp => EVK_PRIOR,
Key::Return => EVK_RETURN,
Key::RightArrow => EVK_RIGHT,
Key::Shift => EVK_SHIFT,
Key::Space => EVK_SPACE,
Key::Tab => EVK_TAB,
Key::UpArrow => EVK_UP,
Key::Raw(raw_keycode) => raw_keycode,
Key::Layout(c) => self.get_layoutdependent_keycode(c.to_string()),
//_ => 0,
Key::Super | Key::Command | Key::Windows | Key::Meta => EVK_LWIN,
}
}
fn key_to_scancode(&self, key: Key) -> u16 {
let keycode = self.key_to_keycode(key);
unsafe { MapVirtualKeyW(keycode as u32, 0) as u16 }
}
fn get_layoutdependent_keycode(&self, string: String) -> u16 {
let mut buffer = [0; 2];
// get the first char from the string ignore the rest
// ensure its not a multybyte char
let utf16 = string
.chars()
.nth(0)
.expect("no valid input") //TODO(dustin): no panic here make an error
.encode_utf16(&mut buffer);
if utf16.len() != 1 {
// TODO(dustin) don't panic here use an apropriate errors
panic!("this char is not allowd");
}
// NOTE VkKeyScanW uses the current keyboard layout
// to specify a layout use VkKeyScanExW and GetKeyboardLayout
// or load one with LoadKeyboardLayoutW
let keycode_and_shiftstate = unsafe { VkKeyScanW(utf16[0]) };
// 0x41 as u16 //key that has the letter 'a' on it on english like keylayout
keycode_and_shiftstate as u16
}
}<|fim▁end|> | self.key_to_scancode(key),
); |
<|file_name|>DashboardVM.js<|end_file_name|><|fim▁begin|>define(["ng", "lodash"], function(ng, _){
"use strict";
var DashboardVM = function DashboardVM($scope, $rootScope){
var _self = this;
this.toggleHStretch = function(isOn){
_self.setOptions(options);
};
this.updateOptions = function(options){
ng.extend(_self, options);
};
this.resetOptions = function(){
_self.updateOptions(_self.attr);
};
this.setAttr = function(attr){
_self.attr=attr;
_self.updateOptions(attr);
};
this.remove = function(target){
// _.remove(_self.items, {name: target});
delete _self.items[target];
console.debug("remove _self.items", _self.items);
};
this.items = $scope.dashboardItems;<|fim▁hole|>// name: "survival1",
// title: "survival1",
// content: "moving around"
// },
// {
// name: "hcl1",
// title: "hcl1",
// content: "moving around"
// },
// {
// name: "ttest1",
// title: "ttest1",
// content: "moving around"
// },
// {
// name: "limma1",
// title: "limma1",
// content: "moving around"
// },
// {
// name: "deseq1",
// title: "deseq1",
// content: "moving around"
// },
// {
// name: "nmf",
// title: "nmf",
// content: "moving around"
// },
// {
// name: "kmeans1",
// title: "kmeans1",
// content: "moving around"
// }
// ];
$scope.$on("ui:dashboard:addItem", function($event, data){
var exists = _.find(_self.items, {name: data.name});
if(exists){
$rootScope.$broadcast("ui:analysisLog.append", "info", "Cannot add analysis '" + data.name + "' to the dashboard. It is already there.");
}else{
_self.items.$add(data);
}
});
$scope.$on("ui:dashboard:removeItem", function($event, data){
console.debug("on ui:dashboard:removeItem", $event, data);
_self.remove(data.name);
});
};
DashboardVM.$inject=["$scope", "$rootScope"];
DashboardVM.$name="DashboardVMController";
return DashboardVM;
});<|fim▁end|> |
// this.items = [
// { |
<|file_name|>library.py<|end_file_name|><|fim▁begin|>"""
Client for the library API.
"""
class LibraryClient(object):
"""
Library API client.
"""
def __init__(self,axilent_connection):
self.content_resource = axilent_connection.resource_client('axilent.library','content')
self.api = axilent_connection.http_client('axilent.library')
def create_content(self,content_type,project,search_index=True,**field_data):
"""
Creates the content. Returns the new content item key in the format:
<content-type>:<content-key>
"""
response = self.content_resource.post(data={'content_type':content_type,
'project':project,
'search_index':search_index,
'content':field_data})
return response['created_content']
def update_content(self,content_type,project,content_key,search_index=True,reset_workflow=True,**field_data):
"""<|fim▁hole|> 'key':content_key,
'search_index':search_index,
'reset_workflow':reset_workflow,
'content':field_data})
return response['updated_content']
def ping(self,project,content_type):
"""
Tests connection with Axilent.
"""
return self.api.ping(project=project,content_type=content_type)
def index_content(self,project,content_type,content_key):
"""
Forces re-indexing of the specified content item.
"""
response = self.api.indexcontent(content_key=content_key,
project=project,
content_type=content_type)
return response['indexed']
def tag_content(self,project,content_type,content_key,tag,search_index=True):
"""
Tags the specified content item.
"""
response = self.api.tagcontent(project=project,
content_type=content_type,
content_key=content_key,
tag=tag,
search_index=search_index)
return response['tagged_content']
def detag_content(self,project,content_type,content_key,tag,search_index=True):
"""
De-tags the specified content item.
"""
response = self.api.detagcontent(project=project,
content_type=content_type,
content_key=content_key,
tag=tag,
search_index=search_index)
return response['removed_tag']
def archive_content(self,project,content_type,content_key):
"""
Archives the content on Axilent.
"""
response = self.content_resource.delete(params={'content_type':content_type,
'project':project,
'key':content_key})
return response['archived']<|fim▁end|> | Updates existing content.
"""
response = self.content_resource.put(data={'content_type':content_type,
'project':project, |
<|file_name|>SendmywayCom.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from module.plugins.internal.XFSHoster import XFSHoster, create_getInfo
class SendmywayCom(XFSHoster):
__name__ = "SendmywayCom"
__type__ = "hoster"
__version__ = "0.04"
__pattern__ = r'http://(?:www\.)?sendmyway\.com/\w{12}'
__description__ = """SendMyWay hoster plugin"""<|fim▁hole|>
HOSTER_DOMAIN = "sendmyway.com"
NAME_PATTERN = r'<p class="file-name" ><.*?>\s*(?P<N>.+)'
SIZE_PATTERN = r'<small>\((?P<S>\d+) bytes\)</small>'
getInfo = create_getInfo(SendmywayCom)<|fim▁end|> | __license__ = "GPLv3"
__authors__ = [("zoidberg", "[email protected]")] |
<|file_name|>datastore.py<|end_file_name|><|fim▁begin|># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import timedelta
from st2client.client import Client
from st2client.models import KeyValuePair
from st2common.services.access import create_token
from st2common.util.api import get_full_public_api_url
from st2common.util.date import get_datetime_utc_now
from st2common.constants.keyvalue import DATASTORE_KEY_SEPARATOR, SYSTEM_SCOPE
class DatastoreService(object):
"""
Class provides public methods for accessing datastore items.
"""
DATASTORE_NAME_SEPARATOR = DATASTORE_KEY_SEPARATOR
def __init__(self, logger, pack_name, class_name, api_username):
self._api_username = api_username
self._pack_name = pack_name
self._class_name = class_name
self._logger = logger
self._client = None
self._token_expire = get_datetime_utc_now()
##################################
# Methods for datastore management
##################################
def list_values(self, local=True, prefix=None):
"""
Retrieve all the datastores items.
:param local: List values from a namespace local to this pack/class. Defaults to True.
:type: local: ``bool``
:param prefix: Optional key name prefix / startswith filter.
:type prefix: ``str``
:rtype: ``list`` of :class:`KeyValuePair`
"""
client = self._get_api_client()
self._logger.audit('Retrieving all the value from the datastore')
key_prefix = self._get_full_key_prefix(local=local, prefix=prefix)
kvps = client.keys.get_all(prefix=key_prefix)
return kvps
def get_value(self, name, local=True, scope=SYSTEM_SCOPE, decrypt=False):
"""
Retrieve a value from the datastore for the provided key.
By default, value is retrieved from the namespace local to the pack/class. If you want to
retrieve a global value from a datastore, pass local=False to this method.
:param name: Key name.
:type name: ``str``
:param local: Retrieve value from a namespace local to the pack/class. Defaults to True.
:type: local: ``bool``
:param scope: Scope under which item is saved. Defaults to system scope.
:type: local: ``str``
:param decrypt: Return the decrypted value. Defaults to False.
:type: local: ``bool``
:rtype: ``str`` or ``None``
"""
if scope != SYSTEM_SCOPE:
raise ValueError('Scope %s is unsupported.' % scope)
name = self._get_full_key_name(name=name, local=local)
client = self._get_api_client()
self._logger.audit('Retrieving value from the datastore (name=%s)', name)
try:
params = {'decrypt': str(decrypt).lower(), 'scope': scope}
kvp = client.keys.get_by_id(id=name, params=params)
except Exception as e:
self._logger.exception(
'Exception retrieving value from datastore (name=%s): %s',
name,
e
)
return None
if kvp:
return kvp.value
return None
def set_value(self, name, value, ttl=None, local=True, scope=SYSTEM_SCOPE, encrypt=False):
"""
Set a value for the provided key.
By default, value is set in a namespace local to the pack/class. If you want to
set a global value, pass local=False to this method.
:param name: Key name.
:type name: ``str``
:param value: Key value.
:type value: ``str``
:param ttl: Optional TTL (in seconds).
:type ttl: ``int``
:param local: Set value in a namespace local to the pack/class. Defaults to True.
:type: local: ``bool``
:param scope: Scope under which to place the item. Defaults to system scope.
:type: local: ``str``
:param encrypt: Encrypt the value when saving. Defaults to False.
:type: local: ``bool``
:return: ``True`` on success, ``False`` otherwise.
:rtype: ``bool``
"""
if scope != SYSTEM_SCOPE:
raise ValueError('Scope %s is unsupported.', scope)
name = self._get_full_key_name(name=name, local=local)
value = str(value)
client = self._get_api_client()
self._logger.audit('Setting value in the datastore (name=%s)', name)
instance = KeyValuePair()
instance.id = name
instance.name = name
instance.value = value
instance.scope = scope
if encrypt:
instance.secret = True
if ttl:
instance.ttl = ttl
client.keys.update(instance=instance)
return True
def delete_value(self, name, local=True, scope=SYSTEM_SCOPE):
"""
Delete the provided key.
By default, value is deleted from a namespace local to the pack/class. If you want to
delete a global value, pass local=False to this method.
:param name: Name of the key to delete.
:type name: ``str``
:param local: Delete a value in a namespace local to the pack/class. Defaults to True.
:type: local: ``bool``
:param scope: Scope under which item is saved. Defaults to system scope.
:type: local: ``str``
:return: ``True`` on success, ``False`` otherwise.
:rtype: ``bool``
"""
if scope != SYSTEM_SCOPE:
raise ValueError('Scope %s is unsupported.', scope)
name = self._get_full_key_name(name=name, local=local)
client = self._get_api_client()
instance = KeyValuePair()
instance.id = name
instance.name = name
self._logger.audit('Deleting value from the datastore (name=%s)', name)
try:
params = {'scope': scope}
client.keys.delete(instance=instance, params=params)
except Exception as e:
self._logger.exception(
'Exception deleting value from datastore (name=%s): %s',
name,
e
)
return False
<|fim▁hole|> return True
def _get_api_client(self):
"""
Retrieve API client instance.
"""
token_expire = self._token_expire <= get_datetime_utc_now()
if not self._client or token_expire:
self._logger.audit('Creating new Client object.')
ttl = (24 * 60 * 60)
self._token_expire = get_datetime_utc_now() + timedelta(seconds=ttl)
temporary_token = create_token(username=self._api_username, ttl=ttl)
api_url = get_full_public_api_url()
self._client = Client(api_url=api_url, token=temporary_token.token)
return self._client
def _get_full_key_name(self, name, local):
"""
Retrieve a full key name.
:rtype: ``str``
"""
if local:
name = self._get_key_name_with_prefix(name=name)
return name
def _get_full_key_prefix(self, local, prefix=None):
if local:
key_prefix = self._get_local_key_name_prefix()
if prefix:
key_prefix += prefix
else:
key_prefix = prefix
return key_prefix
def _get_local_key_name_prefix(self):
"""
Retrieve key prefix which is local to this pack/class.
"""
key_prefix = self._get_datastore_key_prefix() + self.DATASTORE_NAME_SEPARATOR
return key_prefix
def _get_key_name_with_prefix(self, name):
"""
Retrieve a full key name which is local to the current pack/class.
:param name: Base datastore key name.
:type name: ``str``
:rtype: ``str``
"""
prefix = self._get_datastore_key_prefix()
full_name = prefix + self.DATASTORE_NAME_SEPARATOR + name
return full_name
def _get_datastore_key_prefix(self):
prefix = '%s.%s' % (self._pack_name, self._class_name)
return prefix<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate camera_controllers;
extern crate collada;
extern crate dev_menu;
extern crate env_logger;
extern crate gfx;
extern crate gfx_text;
extern crate gfx_debug_draw;
extern crate piston;
extern crate piston_window;
extern crate sdl2_window;
extern crate shader_version;
extern crate skeletal_animation;
extern crate vecmath;
use std::collections::HashMap;
use piston_window::PistonWindow;
use gfx_debug_draw::DebugRenderer;
use piston::window::{
WindowSettings,
};
use piston::input::*;
use vecmath::{mat4_id};
use sdl2_window::Sdl2Window;
use camera_controllers::{
OrbitZoomCamera,
OrbitZoomCameraSettings,
CameraPerspective,
model_view_projection
};
mod demo;
use demo::Settings;
fn main() {
env_logger::init().unwrap();
let (win_width, win_height) = (640, 480);
let mut window: PistonWindow<Sdl2Window> =
WindowSettings::new("Skeletal Animation Demo", [win_width, win_height])
.exit_on_esc(true)
.opengl(shader_version::OpenGL::V3_2)
.build()
.unwrap();
let mut debug_renderer = {
let text_renderer = {
gfx_text::new(window.factory.clone()).unwrap()
};
DebugRenderer::new(window.factory.clone(), text_renderer, 64).ok().unwrap()
};
let model = mat4_id();
let mut projection = CameraPerspective {
fov: 90.0f32,
near_clip: 0.1,
far_clip: 1000.0,
aspect_ratio: (win_width as f32) / (win_height as f32)
}.projection();
let mut orbit_zoom_camera: OrbitZoomCamera<f32> = OrbitZoomCamera::new(
[0.0, 0.0, 0.0],
OrbitZoomCameraSettings::default()
);
// Start event loop
let mut settings = Settings {
use_dlb: true,
draw_skeleton: true,
draw_labels: false,
draw_mesh: true,
playback_speed: 1.0,
params: HashMap::new(),
};
let mut menu = dev_menu::Menu::<Settings>::new();
menu.add_item(dev_menu::MenuItem::action_item(
"Toggle DLB/LBS Skinning",
Box::new( |ref mut settings| {
settings.use_dlb = !settings.use_dlb;
})
));
menu.add_item(dev_menu::MenuItem::action_item(
"Toggle Skeleton",
Box::new( |ref mut settings| { settings.draw_skeleton = !settings.draw_skeleton; })
));
menu.add_item(dev_menu::MenuItem::action_item(
"Toggle Joint Labels",
Box::new( |ref mut settings| { settings.draw_labels = !settings.draw_labels; })
));
menu.add_item(dev_menu::MenuItem::action_item(
"Toggle Mesh",
Box::new( |ref mut settings| { settings.draw_mesh = !settings.draw_mesh; })
));
menu.add_item(dev_menu::MenuItem::slider_item(
"Playback Speed = ",
[-5.0, 5.0],
0.01,
Box::new( |ref settings| { settings.playback_speed }),
Box::new( |ref mut settings, value| { settings.playback_speed = value }),
));
let mut lbs_demo = {
demo::lbs_demo(&mut window.factory)
};
let mut dlb_demo = {
demo::dlb_demo(&mut window.factory)
};
for (param, &value) in dlb_demo.controller.get_parameters().iter() {
settings.params.insert(param.clone(), value);
// Apparently need to make our own string copies to move into each closure..
let param_copy_1 = param.clone();
let param_copy_2 = param.clone();
let range = if param == "target-x" || param == "target-y" || param == "target-z" {
[-100.0, 100.0]
} else {
[0.0, 1.0]
};
let rate = if param == "target-x" || param == "target-y" || param == "target-z" {
0.1
} else {
0.01
};
menu.add_item(dev_menu::MenuItem::slider_item(
&format!("Param[{}] = ", param)[..],
range,
rate,
Box::new( move |ref settings| {
settings.params[¶m_copy_1[..]]
}),
Box::new( move |ref mut settings, value| {
settings.params.insert(param_copy_2.clone(), value);
}),
));
}
// set head look controller params to nice initial values..
settings.params.insert("head-look-level".to_string(), 1.0);
settings.params.insert("head-look-sideways-level".to_string(), 1.0);
settings.params.insert("head-down-to-up".to_string(), 0.5);
settings.params.insert("head-left-to-right".to_string(), 0.5);
while let Some(e) = window.next() {
orbit_zoom_camera.event(&e);
menu.event(&e, &mut settings);
e.resize(|width, height| {
// Update projection matrix
projection = CameraPerspective {
fov: 90.0f32,
near_clip: 0.1,
far_clip: 1000.0,
aspect_ratio: (width as f32) / (height as f32)
}.projection();
});
e.update(|args| {
dlb_demo.update(&settings, args.dt);
lbs_demo.update(&settings, args.dt);
});
window.draw_3d(&e, |window| {
let args = e.render_args().unwrap();
window.encoder.clear(&window.output_color, [0.3, 0.3, 0.3, 1.0]);
window.encoder.clear_depth(&window.output_stencil, 1.0);
let camera_view = orbit_zoom_camera.camera(args.ext_dt).orthogonal();
let camera_projection = model_view_projection(
model,
camera_view,
projection
);
// Draw IK target...
let target = [settings.params["target-x"],
settings.params["target-y"],
settings.params["target-z"]];
debug_renderer.draw_line(vecmath::vec3_sub(target, [1.0, 0.0, 0.0]), vecmath::vec3_add(target, [1.0, 0.0, 0.0]), [1.0, 1.0, 1.0, 1.0]);
debug_renderer.draw_line(vecmath::vec3_sub(target, [0.0, 1.0, 0.0]), vecmath::vec3_add(target, [0.0, 1.0, 0.0]), [1.0, 1.0, 1.0, 1.0]);
debug_renderer.draw_line(vecmath::vec3_sub(target, [0.0, 0.0, 1.0]), vecmath::vec3_add(target, [0.0, 0.0, 1.0]), [1.0, 1.0, 1.0, 1.0]);
// Draw axes
debug_renderer.draw_line([0.0, 0.0, 0.0], [5.0, 0.0, 0.0], [1.0, 0.0, 0.0, 1.0]);
debug_renderer.draw_line([0.0, 0.0, 0.0], [0.0, 5.0, 0.0], [0.0, 1.0, 0.0, 1.0]);
debug_renderer.draw_line([0.0, 0.0, 0.0], [0.0, 0.0, 5.0], [0.0, 0.0, 1.0, 1.0]);
debug_renderer.draw_text_at_position(
"X",
[6.0, 0.0, 0.0],
[1.0, 0.0, 0.0, 1.0],
);
debug_renderer.draw_text_at_position(
"Y",
[0.0, 6.0, 0.0],
[0.0, 1.0, 0.0, 1.0],
);
debug_renderer.draw_text_at_position(
"Z",
[0.0, 0.0, 6.0],
[0.0, 0.0, 1.0, 1.0],
);
dlb_demo.render(&settings, &mut debug_renderer,
&mut window.encoder, &window.output_color, &window.output_stencil,
camera_view, camera_projection, args.ext_dt, settings.use_dlb);<|fim▁hole|> lbs_demo.render(&settings, &mut debug_renderer,
&mut window.encoder, &window.output_color, &window.output_stencil,
camera_view, camera_projection, args.ext_dt, !settings.use_dlb);
menu.draw(&settings, &mut debug_renderer);
if let Err(e) = debug_renderer.render(&mut window.encoder, &window.output_color, &window.output_stencil, camera_projection) {
println!("{:?}", e);
}
});
}
}<|fim▁end|> | |
<|file_name|>stats.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2.6
# This file is a part of Metagam project.
#
# Metagam is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# Metagam is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Metagam. If not, see <http://www.gnu.org/licenses/>.
from mg.constructor import *
from mg.core.auth import AuthLogList
from mg.constructor.player_classes import DBCharacterOnlineList, DBPlayerList
from mg.constructor.interface import DBFirstVisitList
from mg.core.config import DBConfigGroup, DBConfigGroupList
import re
class DBDailyStat(CassandraObject):
clsname = "DailyStat"
indexes = {
"period": [[], "period"],
}
class DBDailyStatList(CassandraObjectList):
objcls = DBDailyStat
class GameReporter(ConstructorModule):
def register(self):
self.rhook("queue-gen.schedule", self.schedule)
self.rhook("marketing.report", self.marketing_report)
self.rhook("config.changed", self.config_changed)
self.rhook("configs.store", self.configs_store)
self.rhook("objclasses.list", self.objclasses_list)
self.rhook("stats.daily", self.stats_daily)
def objclasses_list(self, objclasses):
objclasses["DailyStat"] = (DBDailyStat, DBDailyStatList)
def schedule(self, sched):
sched.add("marketing.report", "0 0 * * *", priority=20)
sched.add("configs.store", "50 23 * * *", priority=15)
def marketing_report(self):
since, till = self.yesterday_interval()
app_tag = self.app().tag
self.debug("a=%s: Preparing marketing report %s - %s", app_tag, since, till)
# mapping: character_id => player_id
character_players = {}
# mapping: character_id => [session_id, since]
characters_online = {}
# mapping: player_id => [N_of_online_characters, since]
players_online = {}
# mapping: session_id => character_id
sessions_online = {}
# list of characters currently online
online_lst = self.objlist(DBCharacterOnlineList, query_index="all")
# auth logs since yesterday
logs = self.objlist(AuthLogList, query_index="performed", query_start=since)
logs.load(silent=True)
# mapping: time => number. number is +1 when somebody online, -1 when offline
events = {}
# player_stats
player_stats = {}
# active players
active_players = set()
def player_stat(pl, s, t, comment):
st = unix_timestamp(s)
tt = unix_timestamp(t)
elapsed = tt - st
self.debug("a=%s: pl=%s online %s - %s (%d sec, %s)", app_tag, pl, s, t, elapsed, comment)
try:
player_stats[pl] += elapsed
except KeyError:
player_stats[pl] = elapsed
try:
events[st] += 1
except KeyError:
events[st] = 1
try:
events[tt] -= 1
except KeyError:
events[tt] = -1
for ent in logs:
performed = ent.get("performed")
act = ent.get("act")
char_uuid = ent.get("user")
player_uuid = ent.get("player")
session_uuid = ent.get("session")
active_players.add(player_uuid)
self.debug("a=%s %s char=%s, player=%s, sess=%s", performed, act, char_uuid, player_uuid, session_uuid)
if performed < till:
# actual date
went_online = False
went_offline = False
if char_uuid and player_uuid:
character_players[char_uuid] = player_uuid
# online events
if (act == "login" or act == "reconnect") and char_uuid and player_uuid:
try:
char = characters_online[char_uuid]
# character already online
if char[0] != session_uuid:
# session of the character changed
del sessions_online[char[0]]
sessions_online[session_uuid] = char_uuid
char[0] = session_uuid
except KeyError:
went_online = True
# offline events
if (act == "logout" or act == "disconnect") and char_uuid and player_uuid:
if not characters_online.get(char_uuid):
# logout without login. assuming login was at the "since" time
characters_online[char_uuid] = [session_uuid, since]
try:
players_online[player_uuid][0] += 1
except KeyError:
players_online[player_uuid] = [1, since]
went_offline = True
# log into cabinet
if act == "login" and player_uuid and not char_uuid:
try:
char_uuid = sessions_online[session_uuid]
char = characters_online[char_uuid]
except KeyError:
pass
else:
went_offline = True
#self.debug(" went_online=%s, went_offline=%s", went_online, went_offline)
# processing online/offline events
if went_online:
characters_online[char_uuid] = [session_uuid, performed]
try:
if players_online[player_uuid][0] == 0:
players_online[player_uuid][1] = performed
players_online[player_uuid][0] += 1
except KeyError:
players_online[player_uuid] = [1, performed]
sessions_online[session_uuid] = char_uuid
if went_offline:
char = characters_online[char_uuid]
try:
del sessions_online[char[0]]
except KeyError:
pass
try:
del characters_online[char_uuid]
except KeyError:
pass
try:
players_online[player_uuid][0] -= 1
except KeyError:
pass
else:
if players_online[player_uuid][0] == 0:
player_stat(player_uuid, players_online[player_uuid][1], performed, "regular")
#self.debug(" current characters_online=%s, players_online=%s, sessions_online=%s", characters_online, players_online, sessions_online)
else:
# the next day
if char_uuid and player_uuid and not character_players.get(char_uuid):
if act == "login" or act == "reconnect":
# this character first appeared in the logs on the next day with "login" event.
# it means he was offline yesterday
character_players[char_uuid] = player_uuid
if act == "logout" or act == "disconnect":
# this character first apparead in the logs on the next day with "logout" event.
# it means he was online yesterday all the day
character_players[char_uuid] = player_uuid
player_stat(player_uuid, since, till, "afterlog")
# getting characters online till the end of the day
for player_uuid, ent in players_online.iteritems():
if ent[0] > 0:
player_stat(player_uuid, ent[1], till, "endofday")
# looking for characters still online
for ent in online_lst:
char_uuid = ent.uuid
if not character_players.get(char_uuid):
# this character is still online and there were no mentions in logs about him
# it means that he was online yesterday all the day
player_uuid = self.character(char_uuid).player.uuid
active_players.add(player_uuid)
player_stat(player_uuid, since, till, "nolog")
# CCU analysis
since_ts = unix_timestamp(since)
last = None
ccu = 0
peak_ccu = 0
hours = [0] * 25
for time in sorted(events.keys()):
if last is not None:
hour_begin = (last - since_ts) / 3600
hour_end = (time - since_ts) / 3600
#self.debug("Interval %d - %d: ccu=%d, hour_begin=%d, hour_end=%d", last, time, ccu, hour_begin, hour_end)
if hour_begin == hour_end:
ratio = (time - last) / 3600.0
#self.debug("Hour %d gets %d * %f", hour_begin, ccu, ratio)
hours[hour_begin] += ccu * ratio
else:
ratio = (since_ts + (hour_begin + 1) * 3600 - last) / 3600.0
#self.debug("Hour %d gets %d * %f", hour_begin, ccu, ratio)
hours[hour_begin] += ccu * ratio
for hour in xrange(hour_begin + 1, hour_end):
#self.debug("Hour %d gets %d * 1.0", hour, ccu)
hours[hour] += ccu
ratio = (time - hour_end * 3600 - since_ts) / 3600.0
#self.debug("Hour %d gets %d * %f", hour_end, ccu, ratio)
hours[hour_end] += ccu * ratio
ccu += events[time]
if ccu > peak_ccu:
peak_ccu = ccu
last = time
#self.debug("CCU at %d = %d", time, ccu)
hours = [int(val) for val in hours[0:24]]
#self.debug("Distribution: %s", hours)
# loading list of newly registered players
lst = self.objlist(DBPlayerList, query_index="created", query_start=since, query_finish=till)
lst.load(silent=True)
registered = 0
for ent in lst:
if not ent.get("last_visit"):
ent.set("last_visit", till)
ent.set("active", 2)
registered += 1
lst.store()
# loading list of active players
returned = 0
if len(active_players):
lst = self.objlist(DBPlayerList, uuids=[uuid for uuid in active_players])
lst.load(silent=True)
for ent in lst:
ent.set("last_visit", till)
if ent.get("active") != 2:
ent.set("active", 2)
returned += 1
lst.store()<|fim▁hole|> lst = self.objlist(DBPlayerList, query_index="active", query_equal="2", query_finish=self.now(-86400 * 14))
lst.load(silent=True)
left = 0
for ent in lst:
if ent.get("active") == 2:
ent.set("active", 0)
left += 1
lst.store()
# loading currently active playerbase
lst = self.objlist(DBPlayerList, query_index="active", query_equal="2")
active = len(lst)
# loading list of new users on the index page
lst = self.objlist(DBFirstVisitList, query_index="all")
new_users = len(lst)
lst.remove()
# don't store information about abandoned games
if len(online_lst) or len(logs) or active > 0:
self.call("dbexport.add", "online", since=since, till=till, players=player_stats, peak_ccu=peak_ccu, ccu_dist=hours, registered=registered, returned=returned, left=left, active=active, new_users=new_users)
self.call("stats.daily", peak_ccu=peak_ccu, ccu_dist=hours, registered=registered, returned=returned, left=left, active=active, new_users=new_users)
def config_changed(self):
project = self.app().project
project.load()
project.set("config_updated", self.now())
project.store()
def configs_store(self):
project = self.app().project
project.load()
if not project.get("config_updated"):
return
self.debug("Storing config changes of the project %s", self.app().tag)
config = {}
lst = self.objlist(DBConfigGroupList, query_index="all")
lst.load(silent=True)
for ent in lst:
config[ent.uuid] = ent.data
self.call("dbexport.add", "config", config=config)
project.delkey("config_updated")
project.store()
def stats_daily(self, **kwargs):
now = self.nowdate()
with self.lock(["DailyStat"]):
obj = self.obj(DBDailyStat, now, silent=True)
for key, val in kwargs.iteritems():
obj.set(key, val)
obj.store()<|fim▁end|> | # loading list of active players that are really inactive for 14 days |
<|file_name|>container_operations_unix.go<|end_file_name|><|fim▁begin|>// +build linux freebsd
package daemon
import (
"context"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strconv"
"time"
"github.com/docker/docker/container"
"github.com/docker/docker/daemon/links"
"github.com/docker/docker/pkg/idtools"
"github.com/docker/docker/pkg/mount"
"github.com/docker/docker/pkg/stringid"
"github.com/docker/docker/runconfig"
"github.com/docker/libnetwork"
"github.com/opencontainers/selinux/go-selinux/label"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"golang.org/x/sys/unix"
)
func (daemon *Daemon) setupLinkedContainers(container *container.Container) ([]string, error) {
var env []string
children := daemon.children(container)
bridgeSettings := container.NetworkSettings.Networks[runconfig.DefaultDaemonNetworkMode().NetworkName()]
if bridgeSettings == nil || bridgeSettings.EndpointSettings == nil {
return nil, nil
}
for linkAlias, child := range children {
if !child.IsRunning() {
return nil, fmt.Errorf("Cannot link to a non running container: %s AS %s", child.Name, linkAlias)
}
childBridgeSettings := child.NetworkSettings.Networks[runconfig.DefaultDaemonNetworkMode().NetworkName()]
if childBridgeSettings == nil || childBridgeSettings.EndpointSettings == nil {
return nil, fmt.Errorf("container %s not attached to default bridge network", child.ID)
}
link := links.NewLink(
bridgeSettings.IPAddress,
childBridgeSettings.IPAddress,
linkAlias,
child.Config.Env,
child.Config.ExposedPorts,
)
env = append(env, link.ToEnv()...)
}
return env, nil
}
func (daemon *Daemon) getIpcContainer(id string) (*container.Container, error) {
errMsg := "can't join IPC of container " + id
// Check the container exists
container, err := daemon.GetContainer(id)
if err != nil {
return nil, errors.Wrap(err, errMsg)
}
// Check the container is running and not restarting
if err := daemon.checkContainer(container, containerIsRunning, containerIsNotRestarting); err != nil {
return nil, errors.Wrap(err, errMsg)
}
// Check the container ipc is shareable
if st, err := os.Stat(container.ShmPath); err != nil || !st.IsDir() {
if err == nil || os.IsNotExist(err) {
return nil, errors.New(errMsg + ": non-shareable IPC")
}
// stat() failed?
return nil, errors.Wrap(err, errMsg+": unexpected error from stat "+container.ShmPath)
}
return container, nil
}
func (daemon *Daemon) getPidContainer(container *container.Container) (*container.Container, error) {
containerID := container.HostConfig.PidMode.Container()
container, err := daemon.GetContainer(containerID)
if err != nil {
return nil, errors.Wrapf(err, "cannot join PID of a non running container: %s", container.ID)
}
return container, daemon.checkContainer(container, containerIsRunning, containerIsNotRestarting)
}
func containerIsRunning(c *container.Container) error {
if !c.IsRunning() {
return stateConflictError{errors.Errorf("container %s is not running", c.ID)}
}
return nil
}
func containerIsNotRestarting(c *container.Container) error {
if c.IsRestarting() {
return errContainerIsRestarting(c.ID)
}
return nil
}
func (daemon *Daemon) setupIpcDirs(c *container.Container) error {
ipcMode := c.HostConfig.IpcMode
switch {
case ipcMode.IsContainer():
ic, err := daemon.getIpcContainer(ipcMode.Container())
if err != nil {
return err
}
c.ShmPath = ic.ShmPath
case ipcMode.IsHost():
if _, err := os.Stat("/dev/shm"); err != nil {
return fmt.Errorf("/dev/shm is not mounted, but must be for --ipc=host")
}
c.ShmPath = "/dev/shm"
case ipcMode.IsPrivate(), ipcMode.IsNone():
// c.ShmPath will/should not be used, so make it empty.
// Container's /dev/shm mount comes from OCI spec.
c.ShmPath = ""
case ipcMode.IsEmpty():
// A container was created by an older version of the daemon.
// The default behavior used to be what is now called "shareable".
fallthrough
case ipcMode.IsShareable():
rootIDs := daemon.idMappings.RootPair()
if !c.HasMountFor("/dev/shm") {
shmPath, err := c.ShmResourcePath()
if err != nil {
return err
}
if err := idtools.MkdirAllAndChown(shmPath, 0700, rootIDs); err != nil {
return err
}
shmproperty := "mode=1777,size=" + strconv.FormatInt(c.HostConfig.ShmSize, 10)
if err := unix.Mount("shm", shmPath, "tmpfs", uintptr(unix.MS_NOEXEC|unix.MS_NOSUID|unix.MS_NODEV), label.FormatMountLabel(shmproperty, c.GetMountLabel())); err != nil {
return fmt.Errorf("mounting shm tmpfs: %s", err)
}
if err := os.Chown(shmPath, rootIDs.UID, rootIDs.GID); err != nil {
return err
}
c.ShmPath = shmPath
}
default:
return fmt.Errorf("invalid IPC mode: %v", ipcMode)
}
return nil
}
func (daemon *Daemon) setupSecretDir(c *container.Container) (setupErr error) {
if len(c.SecretReferences) == 0 {
return nil
}
localMountPath := c.SecretMountPath()
logrus.Debugf("secrets: setting up secret dir: %s", localMountPath)
// retrieve possible remapped range start for root UID, GID
rootIDs := daemon.idMappings.RootPair()
// create tmpfs
if err := idtools.MkdirAllAndChown(localMountPath, 0700, rootIDs); err != nil {
return errors.Wrap(err, "error creating secret local mount path")
}
defer func() {
if setupErr != nil {
// cleanup
_ = detachMounted(localMountPath)
if err := os.RemoveAll(localMountPath); err != nil {
logrus.Errorf("error cleaning up secret mount: %s", err)
}
}
}()
tmpfsOwnership := fmt.Sprintf("uid=%d,gid=%d", rootIDs.UID, rootIDs.GID)
if err := mount.Mount("tmpfs", localMountPath, "tmpfs", "nodev,nosuid,noexec,"+tmpfsOwnership); err != nil {
return errors.Wrap(err, "unable to setup secret mount")
}
if c.DependencyStore == nil {
return fmt.Errorf("secret store is not initialized")
}
for _, s := range c.SecretReferences {
// TODO (ehazlett): use type switch when more are supported
if s.File == nil {
logrus.Error("secret target type is not a file target")
continue
}
// secrets are created in the SecretMountPath on the host, at a
// single level
fPath := c.SecretFilePath(*s)
if err := idtools.MkdirAllAndChown(filepath.Dir(fPath), 0700, rootIDs); err != nil {
return errors.Wrap(err, "error creating secret mount path")
}
logrus.WithFields(logrus.Fields{
"name": s.File.Name,
"path": fPath,
}).Debug("injecting secret")
secret, err := c.DependencyStore.Secrets().Get(s.SecretID)
if err != nil {
return errors.Wrap(err, "unable to get secret from secret store")
}
if err := ioutil.WriteFile(fPath, secret.Spec.Data, s.File.Mode); err != nil {
return errors.Wrap(err, "error injecting secret")
}
uid, err := strconv.Atoi(s.File.UID)
if err != nil {
return err
}
gid, err := strconv.Atoi(s.File.GID)
if err != nil {
return err
}
if err := os.Chown(fPath, rootIDs.UID+uid, rootIDs.GID+gid); err != nil {
return errors.Wrap(err, "error setting ownership for secret")
}
}
label.Relabel(localMountPath, c.MountLabel, false)
// remount secrets ro
if err := mount.Mount("tmpfs", localMountPath, "tmpfs", "remount,ro,"+tmpfsOwnership); err != nil {
return errors.Wrap(err, "unable to remount secret dir as readonly")
}
return nil
}
func (daemon *Daemon) setupConfigDir(c *container.Container) (setupErr error) {
if len(c.ConfigReferences) == 0 {
return nil
}
localPath := c.ConfigsDirPath()
logrus.Debugf("configs: setting up config dir: %s", localPath)
// retrieve possible remapped range start for root UID, GID
rootIDs := daemon.idMappings.RootPair()
// create tmpfs
if err := idtools.MkdirAllAndChown(localPath, 0700, rootIDs); err != nil {
return errors.Wrap(err, "error creating config dir")
}
defer func() {
if setupErr != nil {
if err := os.RemoveAll(localPath); err != nil {
logrus.Errorf("error cleaning up config dir: %s", err)
}
}
}()
if c.DependencyStore == nil {
return fmt.Errorf("config store is not initialized")
}
<|fim▁hole|> // TODO (ehazlett): use type switch when more are supported
if configRef.File == nil {
logrus.Error("config target type is not a file target")
continue
}
fPath := c.ConfigFilePath(*configRef)
log := logrus.WithFields(logrus.Fields{"name": configRef.File.Name, "path": fPath})
if err := idtools.MkdirAllAndChown(filepath.Dir(fPath), 0700, rootIDs); err != nil {
return errors.Wrap(err, "error creating config path")
}
log.Debug("injecting config")
config, err := c.DependencyStore.Configs().Get(configRef.ConfigID)
if err != nil {
return errors.Wrap(err, "unable to get config from config store")
}
if err := ioutil.WriteFile(fPath, config.Spec.Data, configRef.File.Mode); err != nil {
return errors.Wrap(err, "error injecting config")
}
uid, err := strconv.Atoi(configRef.File.UID)
if err != nil {
return err
}
gid, err := strconv.Atoi(configRef.File.GID)
if err != nil {
return err
}
if err := os.Chown(fPath, rootIDs.UID+uid, rootIDs.GID+gid); err != nil {
return errors.Wrap(err, "error setting ownership for config")
}
label.Relabel(fPath, c.MountLabel, false)
}
return nil
}
func killProcessDirectly(cntr *container.Container) error {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
// Block until the container to stops or timeout.
status := <-cntr.Wait(ctx, container.WaitConditionNotRunning)
if status.Err() != nil {
// Ensure that we don't kill ourselves
if pid := cntr.GetPID(); pid != 0 {
logrus.Infof("Container %s failed to exit within 10 seconds of kill - trying direct SIGKILL", stringid.TruncateID(cntr.ID))
if err := unix.Kill(pid, 9); err != nil {
if err != unix.ESRCH {
return err
}
e := errNoSuchProcess{pid, 9}
logrus.Debug(e)
return e
}
}
}
return nil
}
func detachMounted(path string) error {
return unix.Unmount(path, unix.MNT_DETACH)
}
func isLinkable(child *container.Container) bool {
// A container is linkable only if it belongs to the default network
_, ok := child.NetworkSettings.Networks[runconfig.DefaultDaemonNetworkMode().NetworkName()]
return ok
}
func enableIPOnPredefinedNetwork() bool {
return false
}
func (daemon *Daemon) isNetworkHotPluggable() bool {
return true
}
func setupPathsAndSandboxOptions(container *container.Container, sboxOptions *[]libnetwork.SandboxOption) error {
var err error
container.HostsPath, err = container.GetRootResourcePath("hosts")
if err != nil {
return err
}
*sboxOptions = append(*sboxOptions, libnetwork.OptionHostsPath(container.HostsPath))
container.ResolvConfPath, err = container.GetRootResourcePath("resolv.conf")
if err != nil {
return err
}
*sboxOptions = append(*sboxOptions, libnetwork.OptionResolvConfPath(container.ResolvConfPath))
return nil
}
func (daemon *Daemon) initializeNetworkingPaths(container *container.Container, nc *container.Container) error {
container.HostnamePath = nc.HostnamePath
container.HostsPath = nc.HostsPath
container.ResolvConfPath = nc.ResolvConfPath
return nil
}<|fim▁end|> | for _, configRef := range c.ConfigReferences { |
<|file_name|>db.py<|end_file_name|><|fim▁begin|><|fim▁hole|>db.create_all()<|fim▁end|> | from core import db
from auth import models
from admin import models
|
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# hash-ring-ctypes documentation build configuration file, created by
# sphinx-quickstart on Wed Oct 2 18:10:26 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'hash-ring-ctypes'
copyright = u'2013, Matt Dennewitz'<|fim▁hole|># built documents.
#
# The short X.Y version.
version = '1.0.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'hash-ring-ctypesdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto/manual]).
latex_documents = [
('index', 'hash-ring-ctypes.tex', u'hash-ring-ctypes Documentation',
u'Matt Dennewitz', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'hash-ring-ctypes', u'hash-ring-ctypes Documentation',
[u'Matt Dennewitz'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'hash-ring-ctypes', u'hash-ring-ctypes Documentation',
u'Matt Dennewitz', 'hash-ring-ctypes', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False<|fim▁end|> |
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the |
<|file_name|>SlowBullet.java<|end_file_name|><|fim▁begin|><|fim▁hole|>package com.syntacticsugar.vooga.gameplayer.objects.items.bullets;
import com.syntacticsugar.vooga.gameplayer.event.implementations.SlowEvent;
import com.syntacticsugar.vooga.gameplayer.objects.GameObjectType;
public class SlowBullet extends AbstractBullet {
public SlowBullet(BulletParams params, double speedDecrease, int time) {
super(params);
SlowEvent slow = new SlowEvent(speedDecrease, time);
addCollisionBinding(GameObjectType.ENEMY, slow);
}
}<|fim▁end|> | |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for websqlrunner project.
It exposes the WSGI callable as a module-level variable named ``application``.<|fim▁hole|>For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "websqlrunner.settings")
application = get_wsgi_application()<|fim▁end|> | |
<|file_name|>FaceDetector.py<|end_file_name|><|fim▁begin|>import cv2
import numpy as np
import os
from vilay.core.Descriptor import MediaTime, Shape
from vilay.detectors.IDetector import IDetector
from vilay.core.DescriptionScheme import DescriptionScheme
class FaceDetector(IDetector):
def getName(self):
return "Face Detector"
def initialize(self):
# define haar-detector file
print os.getcwd() + '/vilay/detectors/FaceDetector/haarcascade_frontalface_default.xml'
self.cascade = cv2.CascadeClassifier(os.getcwd() + '/vilay/detectors/FaceDetector/haarcascade_frontalface_default.xml')
def detect(self, mediaTimes, tgtDS, film, rootDS, mainGUI):
for mediaTime in mediaTimes:
for frameIdx in range(mediaTime.startTime, mediaTime.startTime + mediaTime.duration):
actFrame = film.getFrame(frameIdx)
<|fim▁hole|>
# detect faces
faces = self.cascade.detectMultiScale(actFrame, 1.2, 3, 0, (5,5))
# create ds and add time and shape descriptor
for faceIdx in range(len(faces)):
[x,y,width,height] = faces[faceIdx,:]
ds = DescriptionScheme('RTI', 'Face Detector')
region = Shape('Face Detector','rect', np.array([[x, y], [x + width, y + height]]))
mediaTime = MediaTime('Face Detector', frameIdx, 1)
tgtDS.addDescriptionScheme(ds)
ds.addDescriptor(region)
ds.addDescriptor(mediaTime)<|fim▁end|> | # preprocessing
actFrame = cv2.cvtColor(actFrame, cv2.cv.CV_BGR2GRAY)
actFrame = cv2.equalizeHist(actFrame) |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># from index import db
# class MyObject():
# def __init__(self):
# pass
# @staticmethod<|fim▁hole|><|fim▁end|> | # def get_something(arg1, arg2):
# return something |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'FormPlugin'
db.create_table(u'cmsplugin_formplugin', (
(u'cmsplugin_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['cms.CMSPlugin'], unique=True, primary_key=True)),
('form_class', self.gf('django.db.models.fields.CharField')(max_length=200)),
('success_url', self.gf('django.db.models.fields.URLField')(max_length=200, null=True)),
('post_to_url', self.gf('django.db.models.fields.URLField')(max_length=200)),
))
db.send_create_signal(u'cms_form_plugin', ['FormPlugin'])<|fim▁hole|> def backwards(self, orm):
# Deleting model 'FormPlugin'
db.delete_table(u'cmsplugin_formplugin')
models = {
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'cms_form_plugin.formplugin': {
'Meta': {'object_name': 'FormPlugin', 'db_table': "u'cmsplugin_formplugin'", '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'form_class': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'post_to_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'success_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
}
}
complete_apps = ['cms_form_plugin']<|fim▁end|> | |
<|file_name|>invalid_range_index_example.py<|end_file_name|><|fim▁begin|>for i in range(0):
i += 1
for j in range(0, 1, 3):<|fim▁hole|>
for n in range(0, 1.1): # Error on this line
n += 1
for m in range(4, 5):
m += 1<|fim▁end|> | j += 1
for k in range(9, 1, -9):
k += 1 |
<|file_name|>move-scalar.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
let y: int = 42;
let mut x: int;
x = y;
assert_eq!(x, 42);
}<|fim▁end|> | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT. |
<|file_name|>list.js<|end_file_name|><|fim▁begin|>import Omi from 'omi/dist/omi'
import { CellsTitle, Cells, CellHeader, CellBody, CellFooter } from '../cell'
Omi.makeHTML('CellsTitle', CellsTitle);
Omi.makeHTML('Cells', Cells);<|fim▁hole|>export default class List extends Omi.Component{
constructor(data) {
super(data);
}
render(){
return `
<div>
<CellsTitle data-title={{title}} />
<Cells slot-index="0">
<div>
{{#items}}
<{{#link}}a href={{link}} {{/link}}{{^link}}div{{/link}} class="weui-cell {{#link}}weui-cell_access{{/link}}">
{{#imageUrl}}
<CellHeader>
<img style="width:20px;margin-right:5px;display:block" src={{imageUrl}} />
</CellHeader>
{{/imageUrl}}
<CellBody slot-index="0" >
<p>{{{title}}}</p>
</CellBody>
<CellFooter slot-index="1">
<span>{{value}}</span>
</CellFooter>
</{{#link}}a{{/link}}{{^link}}div{{/link}}>
{{/items}}
</div>
</Cells>
</div>
`;
}
}<|fim▁end|> | Omi.makeHTML('CellHeader', CellHeader);
Omi.makeHTML('CellBody', CellBody);
Omi.makeHTML('CellFooter', CellFooter);
|
<|file_name|>mat.rs<|end_file_name|><|fim▁begin|>use std::ops::{Add, Sub};
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Vec3 {
pub x: f32,
pub y: f32,
pub z: f32
}
impl Add for Vec3 {<|fim▁hole|> x: self.x + rhs.x,
y: self.y + rhs.y,
z: self.z + rhs.z
}
}
}
impl Sub for Vec3 {
type Output = Vec3;
fn sub(self, rhs: Vec3) -> Vec3 {
Vec3 {
x: self.x - rhs.x,
y: self.y - rhs.y,
z: self.z - rhs.z
}
}
}
impl Vec3 {
pub fn zero() -> Vec3 { Vec3 { x: 0.0, y: 0.0, z: 0.0 } }
pub fn x() -> Vec3 { Vec3 { x: 1.0, .. Vec3::zero() } }
pub fn y() -> Vec3 { Vec3 { y: 1.0, .. Vec3::zero() } }
pub fn z() -> Vec3 { Vec3 { z: 1.0, .. Vec3::zero() } }
pub fn scale(&self, factor: f32) -> Vec3 {
Vec3 {
x: self.x * factor,
y: self.y * factor,
z: self.z * factor
}
}
pub fn norm(&self) -> f32 {
self.dot(&self).sqrt()
}
pub fn unit(&self) -> Vec3 {
self.scale(1.0 / self.norm())
}
pub fn dot(&self, rhs: &Vec3) -> f32 {
self.x * rhs.x + self.y * rhs.y + self.z * rhs.z
}
pub fn cross(&self, rhs: &Vec3) -> Vec3 {
Vec3 {
x: self.y * rhs.z - self.z * rhs.y,
y: self.z * rhs.x - self.x * rhs.z,
z: self.x * rhs.y - self.y * rhs.x
}
}
}
#[test]
fn check_vec() {
let x = Vec3::x();
let y = Vec3::y();
let z = Vec3::z();
assert_eq!(x.dot(&y), 0.0);
assert_eq!(y.dot(&z), 0.0);
assert_eq!(x.cross(&y), z);
assert_eq!(y.cross(&z), x);
assert_eq!(z.cross(&x), y);
}
pub fn solve_quadratic(a: f32, b: f32, c: f32) -> Option<(f32,f32)> {
let det = b*b - 4.0*a*c;
if 0.0 <= det {
let detrt = det.sqrt();
Some(((-b + detrt) / (2.0 * a),
(-b - detrt) / (2.0 * a)))
} else { None }
}
#[test]
fn check_quadratic() {
assert_eq!(solve_quadratic(1.0, -2.0, -3.0), Some((3.0, -1.0)));
assert_eq!(solve_quadratic(4.0, -24.0, 36.0), Some((3.0, 3.0)));
}
/// Angle stored in radians
#[derive(Debug, PartialEq)]
pub struct Angle(f32);
const RAD_TO_DEG: f32 = 180.0 / ::std::f32::consts::PI;
impl Angle {
pub fn from_degrees(x: f32) -> Angle { Angle(x / RAD_TO_DEG) }
pub fn from_radians(x: f32) -> Angle { Angle(x) }
pub fn rad(&self) -> f32 { self.0 }
pub fn deg(&self) -> f32 { self.rad() * RAD_TO_DEG }
}
#[test]
fn check_angle() {
assert_eq!(Angle::from_degrees(360.0).rad(), 2.0 * ::std::f32::consts::PI);
assert_eq!(Angle::from_radians(::std::f32::consts::PI).deg(), 180.0);
assert_eq!(Angle::from_radians(::std::f32::consts::PI / 2.0), Angle::from_degrees(90.0));
}<|fim▁end|> | type Output = Vec3;
fn add(self, rhs: Vec3) -> Vec3 {
Vec3 { |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>export * from './TitusCapacityDetailsSection';
export * from './TitusSecurityGroups';
export * from './resize/TitusResizeServerGroupModal';<|fim▁hole|><|fim▁end|> | export * from './scalingActivity/TitusScalingActivitiesModal';
export * from './scalingPolicy'; |
<|file_name|>source.rs<|end_file_name|><|fim▁begin|>use check_file;
use ffprobe;
use path;
use std::cmp::Ordering;
use std::error::Error as StdError;
use std::fmt;
use std::io;
use std::iter::IntoIterator;
use std::ops::{Deref, DerefMut};
use std::path::PathBuf;
#[derive(Debug)]
pub enum Error {
CheckFileError(check_file::Error),
FFProbeError {
path: PathBuf,
error: ffprobe::Error,
},
PathError { path: PathBuf, error: io::Error },
SourceDirectory { path: PathBuf, error: io::Error },
StraySource { path: PathBuf },
}
impl From<check_file::Error> for Error {
fn from(err: check_file::Error) -> Self {
Error::CheckFileError(err)
}
}
impl StdError for Error {
fn description(&self) -> &str {
match *self {
Error::CheckFileError(_) => "Error happened while checking file",
Error::FFProbeError { .. } => "FFProbe error",
Error::PathError { .. } => "Could not expand path",
Error::SourceDirectory { .. } => "Error happened while resolving INPUT_DIRECTORY",
Error::StraySource { .. } => "Path cannot be outside INPUT_DIRECTORY",
}
}
fn cause(&self) -> Option<&StdError> {
match *self {
Error::CheckFileError(ref error) => Some(error),
Error::FFProbeError { ref error, .. } => Some(error),
Error::PathError { ref error, .. } => Some(error),
Error::SourceDirectory { ref error, .. } => Some(error),
Error::StraySource { .. } => None,
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::PathError { ref path, .. } |
Error::FFProbeError { ref path, .. } |
Error::SourceDirectory { ref path, .. } |
Error::StraySource { ref path, .. } => {
write!(f,
"{desc}: {path:?}",
desc = self.description(),
path = path)
}
Error::CheckFileError(_) => write!(f, "{}", self.description()),
}
}
}
type SourceResult<T> = Result<T, Error>;
#[derive(Debug, Clone)]
pub struct BasedPath {
pub path: PathBuf,
pub base: PathBuf,
}
impl BasedPath {
pub fn relative(&self) -> PathBuf {
path::find_relative(&self.path, &self.base)
}
}
#[derive(Debug, Clone)]
pub struct Source {
pub path: BasedPath,
pub ffprobe: ffprobe::FFProbe,
}
impl Ord for Source {
fn cmp(&self, other: &Source) -> Ordering {
self.path.path.cmp(&other.path.path)
}
}
impl Eq for Source {}
impl PartialOrd for Source {
fn partial_cmp(&self, other: &Source) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl PartialEq for Source {
fn eq(&self, other: &Source) -> bool {
self.path.path == other.path.path
}
}
#[derive(Debug)]
pub struct Sources(Vec<Source>);
impl IntoIterator for Sources {
type Item = Source;
type IntoIter = ::std::vec::IntoIter<Self::Item>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
impl Deref for Sources {
type Target = [Source];
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for Sources {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl Deref for BasedPath {
type Target = PathBuf;
fn deref(&self) -> &Self::Target {
&self.path
}
}
impl Sources {
pub fn from_paths<'a, T, U>(paths: T,
base_directory: &'a str)
-> SourceResult<(Self, Vec<BasedPath>)>
where T: IntoIterator<Item = U>,
U: Into<PathBuf>
{
let base_directory = PathBuf::from(base_directory);
let base_directory = match base_directory.canonicalize() {
Ok(dir) => dir,
Err(e) => {
return Err(Error::SourceDirectory {
path: base_directory,
error: e,
})
}
};
let paths: Result<Vec<_>, Error> = paths.into_iter()
.map(|x| x.into())
.map(canonicalize)
.collect();
let paths = try!(paths);
if let Some(path) = paths.iter().filter(|&path| !path.starts_with(&base_directory)).next() {
return Err(Error::StraySource { path: path.clone() });
}
let mut expanded_paths: Vec<PathBuf> = paths.into_iter()
.flat_map(expand_path)
.collect();
expanded_paths.sort();
expanded_paths.dedup();
// Quick filtering using 'file'
let (paths, skipped_file) = try!(check_file::check_files(expanded_paths.into_iter()));
let skipped_file = skipped_file.into_iter().map(|p| {
BasedPath {
path: p,
base: base_directory.clone(),
}
});
let sources: Result<Vec<_>, Error> = paths.into_iter()
.map(|path| ffprobe_it(&path).map(|probe| (path, probe)))
.collect();
let (good, skipped_ffprobe): (Vec<_>, Vec<_>) =
try!(sources).into_iter().partition(|&(_, ref probe)| probe.is_some());
let good = good.into_iter().filter_map(|(path, probe)| {
probe.map(|probe| {
Source {
ffprobe: probe,
path: BasedPath {
path: path,
base: base_directory.clone(),
},
}
})
});
let skipped_ffprobe = skipped_ffprobe.into_iter().map(|(path, _)| {
BasedPath {
path: path,
base: base_directory.clone(),
}
});
Ok((Sources(good.collect()), skipped_file.chain(skipped_ffprobe).collect()))
}
}
fn canonicalize(path: PathBuf) -> Result<PathBuf, Error> {<|fim▁hole|> match path.canonicalize() {
Err(e) => {
Err(Error::PathError {
error: e,
path: path,
})
}
Ok(p) => Ok(p),
}
}
fn expand_path(path: PathBuf) -> Vec<PathBuf> {
use path::{RecursivePathIterator, PathType};
let paths: Vec<PathBuf> = match path.is_dir() {
false => vec![path],
true => {
RecursivePathIterator::new(path)
.filter_map(|x| {
match x {
PathType::Directory(_) => None,
PathType::File(p) => Some(p),
}
})
.collect()
}
};
return paths;
}
fn ffprobe_it(path: &PathBuf) -> SourceResult<Option<ffprobe::FFProbe>> {
use self::Error::FFProbeError;
let res = ffprobe::ffprobe(path);
match res {
Err(e) => {
Err(FFProbeError {
path: path.to_owned(),
error: e,
})
}
Ok(r) => Ok(r),
}
}<|fim▁end|> | |
<|file_name|>config-wiki.post.json.js<|end_file_name|><|fim▁begin|><import resource="classpath:alfresco/site-webscripts/org/alfresco/callutils.js">
if (!json.isNull("wikipage"))
{
var wikipage = String(json.get("wikipage"));
model.pagecontent = getPageText(wikipage);
model.title = wikipage.replace(/_/g, " ");
}
else
{
model.pagecontent = "<h3>" + msg.get("message.nopage") + "</h3>";
model.title = "";
}
function getPageText(wikipage)
{
var c = sitedata.getComponent(url.templateArgs.componentId);
c.properties["wikipage"] = wikipage;
c.save();<|fim▁hole|>
var siteId = String(json.get("siteId"));
var uri = "/slingshot/wiki/page/" + siteId + "/" + encodeURIComponent(wikipage) + "?format=mediawiki";
var connector = remote.connect("alfresco");
var result = connector.get(uri);
if (result.status == status.STATUS_OK)
{
/**
* Always strip unsafe tags here.
* The config to option this is currently webscript-local elsewhere, so this is the safest option
* until the config can be moved to share-config scope in a future version.
*/
return stringUtils.stripUnsafeHTML(result.response);
}
else
{
return "";
}
}<|fim▁end|> | |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Compute API documentation build configuration file
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['openstackdocstheme']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Compute API Guide'
bug_tag = u'api-guide'
repository_name = 'openstack/nova'
bug_project = 'nova'
# Must set this variable to include year, month, day, hours, and minutes.
html_last_updated_fmt = '%Y-%m-%d %H:%M'
copyright = u'2015, OpenStack contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.1.0'
# The full version, including alpha/beta/rc tags.
release = '2.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = [openstackdocstheme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%Y-%m-%d %H:%M'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'compute-api-guide'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'ComputeAPI.tex', u'Compute API Documentation',
u'OpenStack contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'computeapi', u'Compute API Documentation',
[u'OpenStack contributors'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'ComputeAPIGuide', u'Compute API Guide',
u'OpenStack contributors', 'APIGuide',
'This guide teaches OpenStack Compute service users concepts about '
'managing resources in an OpenStack cloud with the Compute API.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# -- Options for Internationalization output ------------------------------
locale_dirs = ['locale/']
# -- Options for PDF output --------------------------------------------------
pdf_documents = [
('index', u'ComputeAPIGuide', u'Compute API Guide', u'OpenStack '
'contributors')
]
<|fim▁hole|> 'nova',
]<|fim▁end|> | # -- Options for openstackdocstheme -------------------------------------------
openstack_projects = [ |
<|file_name|>huawei_lte.py<|end_file_name|><|fim▁begin|>"""
Support for Huawei LTE routers.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/huawei_lte/
"""
from datetime import timedelta
from functools import reduce
import logging
import operator
import voluptuous as vol
import attr
from homeassistant.const import (
CONF_URL, CONF_USERNAME, CONF_PASSWORD, EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.helpers import config_validation as cv
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
REQUIREMENTS = ['huawei-lte-api==1.0.12']
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=10)
DOMAIN = 'huawei_lte'
DATA_KEY = 'huawei_lte'
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.All(cv.ensure_list, [vol.Schema({
vol.Required(CONF_URL): cv.url,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
})])
}, extra=vol.ALLOW_EXTRA)
@attr.s
class RouterData:
"""Class for router state."""
client = attr.ib()
device_information = attr.ib(init=False, factory=dict)
device_signal = attr.ib(init=False, factory=dict)
traffic_statistics = attr.ib(init=False, factory=dict)
wlan_host_list = attr.ib(init=False, factory=dict)
def __getitem__(self, path: str):
"""
Get value corresponding to a dotted path.
The first path component designates a member of this class
such as device_information, device_signal etc, and the remaining
path points to a value in the member's data structure.
"""
root, *rest = path.split(".")
try:
data = getattr(self, root)
except AttributeError as err:
raise KeyError from err
return reduce(operator.getitem, rest, data)
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self) -> None:
"""Call API to update data."""
self.device_information = self.client.device.information()
_LOGGER.debug("device_information=%s", self.device_information)
self.device_signal = self.client.device.signal()
_LOGGER.debug("device_signal=%s", self.device_signal)
self.traffic_statistics = self.client.monitoring.traffic_statistics()
_LOGGER.debug("traffic_statistics=%s", self.traffic_statistics)
self.wlan_host_list = self.client.wlan.host_list()
_LOGGER.debug("wlan_host_list=%s", self.wlan_host_list)
@attr.s
class HuaweiLteData:
"""Shared state."""
data = attr.ib(init=False, factory=dict)
def get_data(self, config):
"""Get the requested or the only data value."""
if CONF_URL in config:
return self.data.get(config[CONF_URL])
if len(self.data) == 1:
return next(iter(self.data.values()))
return None<|fim▁hole|>
def setup(hass, config) -> bool:
"""Set up Huawei LTE component."""
if DATA_KEY not in hass.data:
hass.data[DATA_KEY] = HuaweiLteData()
for conf in config.get(DOMAIN, []):
_setup_lte(hass, conf)
return True
def _setup_lte(hass, lte_config) -> None:
"""Set up Huawei LTE router."""
from huawei_lte_api.AuthorizedConnection import AuthorizedConnection
from huawei_lte_api.Client import Client
url = lte_config[CONF_URL]
username = lte_config[CONF_USERNAME]
password = lte_config[CONF_PASSWORD]
connection = AuthorizedConnection(
url,
username=username,
password=password,
)
client = Client(connection)
data = RouterData(client)
data.update()
hass.data[DATA_KEY].data[url] = data
def cleanup(event):
"""Clean up resources."""
client.user.logout()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, cleanup)<|fim▁end|> | |
<|file_name|>TestTryTest.java<|end_file_name|><|fim▁begin|>package com.caozeal.practice;
import static org.assertj.core.api.Assertions.*;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.firefox.FirefoxDriver;
public class TestTryTest {
// @OdevityMain2
// public void seleniumTest(){
// WebDriver driver = new FirefoxDriver();
// driver.get("http://www.baidu.com");
//// WebElement query = driver.findElement(By.name("search"));
//// query.sendKeys("傲然绝唳的测试");
////
//// WebElement goButton = driver.findElement(By.name("go"));
//// goButton.click();<|fim▁hole|>//// assertThat(driver.getTitle()).startsWith("傲然绝唳的测试");
// driver.quit();
// }
}<|fim▁end|> | //// |
<|file_name|>ddate.rs<|end_file_name|><|fim▁begin|>/// Enum containing all discordian Days, including StTibsDay
#[derive(Debug,PartialEq)]
enum Day {
Sweetmorn,
Boomtime,
Pungenday,
PricklePrickle,
SettingOrange,
StTibsDay,
}
/// Enum containing all discordian Seasons, including StTibsDay
#[derive(Debug,PartialEq)]
enum Season {
Chaos,
Discord,
Confusion,
Bureaucracy,
TheAftermath,
StTibsDay,
}
/// Representation for a Discordian Date
#[derive(Debug,PartialEq)]
pub struct DiscordianDate {
/// Season of the discordian year
season: Season,
/// Day of the discordian Season, zero-based
day: u8,
/// Day of the discordian year, zero-based
year_day: u16,
/// Discordian year, which includes a year zero
year: i32,
/// Day of the discordian week
week_day: Day,
/// Week of the discordian year, or None for StTibsDay
week: Option<u8>,
}
/// Converts a year and day to a Discordian Date
///
/// # Arguments
/// * `nday` - Days after January 1st, starting at zero
/// * `nyear` - Astronomicaly numbered year. This means there is a year zero
///
pub fn convert(nday: u16, nyear: i32) -> Option<DiscordianDate> {
let year = nyear + 1166;
let year_day = nday;
if !is_leap_year(nyear) {
let season = match nday {
0 ... 72 => Season::Chaos,
73 ... 145 => Season::Discord,
146 ... 218 => Season::Confusion,
219 ... 291 => Season::Bureaucracy,
292 ... 364 => Season::TheAftermath,
_ => panic!("Day out of range: {}", nday)
};
let week_day = week_day(nday);
let day = (nday % 73) as u8;
let week = Some((nday / 5) as u8);
return Some(DiscordianDate {season: season, day: day,
year_day: year_day, year: year,
week: week, week_day: week_day})
} else {
let season = match nday {
59 => Season::StTibsDay,
0 ... 73 => Season::Chaos,
74 ... 146 => Season::Discord,
147 ... 219 => Season::Confusion,
220 ... 292 => Season::Bureaucracy,
293 ... 365 => Season::TheAftermath,
_ => panic!("Day out of range: {}", nday)
};
let week_day = match nday {
0 ... 58 => week_day(nday),
59 => Day::StTibsDay,
60 ... 365 => week_day(nday - 1),
_ => panic!("Day out of range: {}", nday)
};
let day = match nday {
0 ... 58 => nday,
59 => 0,
60 ... 365 => (nday - 1) % 73,
_ => panic!("Day out of range: {}", nday)
} as u8;
let week = match nday {
0 ... 58 => Some((nday / 5) as u8),
59 => None,
60 ... 365 => Some(((nday - 1) / 5) as u8),
_ => panic!("Day out of range: {}", nday)
};
return Some(DiscordianDate {season: season, day: day,
year_day: year_day, year: year,
week: week, week_day: week_day})
}
}
/// Return the weekday for a given day in the discordian year
///
/// This function will not correct for StTibsDay. All dates after StTibsDay
/// need to be reduced by one.
///
/// # Arguments
/// * `nday` - Days after January 1st, starting at zero
///
fn week_day(nday: u16) -> Day{
match nday % 5 {
0 => Day::Sweetmorn,
1 => Day::Boomtime,
2 => Day::Pungenday,
3 => Day::PricklePrickle,
4 => Day::SettingOrange,
_ => panic!("Weekday out of range: {}", nday % 5)
}
}
/// Determines if the supplied year is a leap year
///
/// There is a year zero before year one. But the result of the
/// leap year calculation is undefined before the switch to the
/// gregorian calendar (1582 CE)
///
/// # Arguments
/// * `year` - Astronomicaly numbered year. This means there is a year zero
///
fn is_leap_year(year: i32) -> bool {
let has_factor = |n| year % n == 0;
return has_factor(4) && !has_factor(100) || has_factor(400)
}
#[cfg(test)]
mod test {
#[test]
fn test_convert() {
assert_eq!(super::DiscordianDate {season: super::Season::Chaos,
day: 0, year_day: 0, year: 3182,
week: Some(0), week_day: super::Day::Sweetmorn},
super::convert(0, 2016).unwrap());
assert_eq!(super::DiscordianDate {season: super::Season::Chaos,
day: 0, year_day: 0, year: 1166,
week: Some(0), week_day: super::Day::Sweetmorn},
super::convert(0, 0).unwrap());
assert_eq!(super::DiscordianDate {season: super::Season::Chaos,
day: 0, year_day: 0, year: 1165,
week: Some(0), week_day: super::Day::Sweetmorn},
super::convert(0, -1).unwrap());
assert_eq!(super::DiscordianDate {season: super::Season::Chaos,
day: 0, year_day: 0, year: 0,
week: Some(0), week_day: super::Day::Sweetmorn},
super::convert(0, -1166).unwrap());
assert_eq!(super::DiscordianDate {season: super::Season::Chaos,
day: 0, year_day: 0, year: -1,
week: Some(0), week_day: super::Day::Sweetmorn},
super::convert(0, -1167).unwrap());
assert_eq!(super::DiscordianDate {season: super::Season::StTibsDay,
day: 0, year_day: 59, year: 3166,
week: None, week_day: super::Day::StTibsDay},
super::convert(59, 2000).unwrap());
assert_eq!(super::DiscordianDate {season: super::Season::Chaos,
day: 59, year_day: 60, year: 3166,
week: Some(11), week_day: super::Day::SettingOrange},
super::convert(60, 2000).unwrap());
assert_eq!(super::DiscordianDate {season: super::Season::Discord,
day: 11, year_day: 85, year: 3166,
week: Some(16), week_day: super::Day::SettingOrange},
super::convert(85, 2000).unwrap());
assert_eq!(super::DiscordianDate {season: super::Season::TheAftermath,
day: 72, year_day: 365, year: 3166,
week: Some(72), week_day: super::Day::SettingOrange},
super::convert(365, 2000).unwrap());
}
#[test]
fn test_week_day() {
assert_eq!(super::week_day(0), super::Day::Sweetmorn);
assert_eq!(super::week_day(1), super::Day::Boomtime);
assert_eq!(super::week_day(2), super::Day::Pungenday);
assert_eq!(super::week_day(3), super::Day::PricklePrickle);
assert_eq!(super::week_day(4), super::Day::SettingOrange);
assert_eq!(super::week_day(10), super::Day::Sweetmorn);
assert_eq!(super::week_day(12), super::Day::Pungenday);
assert_eq!(super::week_day(21), super::Day::Boomtime);
}
#[test]
fn test_leap_year_positive() {
assert!(super::is_leap_year(2004));
assert!(super::is_leap_year(2008));
assert!(super::is_leap_year(2012));
assert!(super::is_leap_year(2016));
}
#[test]
fn test_leap_year_century() {
assert!(super::is_leap_year(2000));
assert!(!super::is_leap_year(1900));
assert!(!super::is_leap_year(1800));
assert!(!super::is_leap_year(2100));
}<|fim▁hole|> assert!(!super::is_leap_year(1998));
assert!(!super::is_leap_year(1999));
assert!(!super::is_leap_year(2014));
assert!(!super::is_leap_year(2015));
}
}<|fim▁end|> |
#[test]
fn test_leap_year_negative() { |
<|file_name|>uniq.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for uniq
// Project: https://www.npmjs.com/package/uniq
// Definitions by: Hans Windhoff <https://github.com/hansrwindhoff>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
<|fim▁hole|>
declare var uniq :Uniq;
declare module "uniq" {
export = uniq;
}<|fim▁end|> |
interface Uniq{
<T>(ip:Array<T>): Array<T>;
} |
<|file_name|>macgen.py<|end_file_name|><|fim▁begin|># macgen.py script to generate a MAC address for Red Hat Virtualization guests
#
# from http://www.linux-kvm.com/sites/default/files/macgen.py
import random
def randomMAC():
mac = [ 0x00, 0x16, 0x3e,<|fim▁hole|><|fim▁end|> | random.randint(0x00, 0x7f),
random.randint(0x00, 0xff),
random.randint(0x00, 0xff) ]
return ':'.join(map(lambda x: "%02x" % x, mac)) |
<|file_name|>example_client.py<|end_file_name|><|fim▁begin|># Copyright (C) 2016 Robert Scott
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import shutil, os
from client import SpreadsheetClient
if __name__ == "__main__":
"""This script shows how the differnet functions exposed by client.py can be
used."""
EXAMPLE_SPREADSHEET = "example.ods"
# Copy the example spreadsheet from the tests directory into the spreadsheets
# directory
shutil.copyfile(
os.path.join("tests", EXAMPLE_SPREADSHEET),
os.path.join("spreadsheets", EXAMPLE_SPREADSHEET)
)
SHEET_NAME = "Sheet1"
print("Waiting for the example spreadsheet to be scanned and loaded into LibreOffice.")
sc = SpreadsheetClient(EXAMPLE_SPREADSHEET)
# Get sheet names
sheet_names = sc.get_sheet_names()
print(sheet_names)
# Set a cell value
sc.set_cells(SHEET_NAME, "A1", 5)
# Retrieve a cell value.
cell_value = sc.get_cells(SHEET_NAME, "C3")
print(cell_value)
# Set a one dimensional cell range.
# Cells are set using the format: [A1, A2, A3]
cell_values = [1, 2, 3]
sc.set_cells(SHEET_NAME, "A1:A3", cell_values)
# Retrieve one dimensional cell range.
cell_values = sc.get_cells(SHEET_NAME, "C1:C3")
print(cell_values)
<|fim▁hole|> # Cells are set using the format: [[A1, B1, C1], [A2, B2, C2], [A3, B3, C3]]
cell_values = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
sc.set_cells(SHEET_NAME, "A1:C3", cell_values)
# Retrieve a two dimensional cell range.
cell_values = sc.get_cells(SHEET_NAME, "A1:C3")
print(cell_values)
# Save a spreadsheet - it will save into ./saved_spreadsheets
sc.save_spreadsheet(EXAMPLE_SPREADSHEET)
sc.disconnect()
os.remove(os.path.join("spreadsheets", EXAMPLE_SPREADSHEET))<|fim▁end|> | # Set a two dimensional cell range. |
<|file_name|>web_rootMenu.py<|end_file_name|><|fim▁begin|>"""
@name: PyHouse/Project/src/Modules/Computer/Web/web_rootMenu.py
@author: D. Brian Kimmel
@contact: [email protected]
@copyright: (c) 2013-2019 by D. Brian Kimmel
@license: MIT License
@note: Created on May 30, 2013
@summary: Handle the Main menu.
"""
__updated__ = '2019-10-31'
# Import system type stuff
from twisted.web._element import renderer, Element
# Import PyMh files and modules.
from Modules.Core import logging_pyh as Logger
LOG = Logger.getLogger('PyHouse.webRootMenu ')
class RootMenuElement(Element):
"""<|fim▁hole|> """
# docFactory = loaders.xmlfile(os.path.join(templatepath, 'rootMenuElement.html'))
jsClass = u'rootMenu.RootMenuWidget'
def __init__(self, p_workspace_obj):
self.m_pyhouse_obj = p_workspace_obj.m_pyhouse_obj
@renderer
def XXdoRootMenuReload(self, _p_json):
""" Process a message for a XML save/reload from the browser/client.
"""
LOG.info("Self: {}".format(self))
self.m_pyhouse_obj.XXPyHouseMainApi.SaveXml(self.m_pyhouse_obj)
@renderer
def doRootMenuQuit(self, p_json):
""" Process a message for a browser logoff and quit that came from the browser/client.
"""
LOG.info("Self: {}; JSON: {}".format(self, p_json))
# ## END DBK<|fim▁end|> | |
<|file_name|>client.go<|end_file_name|><|fim▁begin|>// Package machinelearningservices implements the Azure ARM Machinelearningservices service API version 2019-06-01.
//
// These APIs allow end users to operate on Azure Machine Learning Workspace resources.
package machinelearningservices
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
<|fim▁hole|>const (
// DefaultBaseURI is the default URI used for the service Machinelearningservices
DefaultBaseURI = "https://management.azure.com"
)
// BaseClient is the base client for Machinelearningservices.
type BaseClient struct {
autorest.Client
BaseURI string
SubscriptionID string
}
// New creates an instance of the BaseClient client.
func New(subscriptionID string) BaseClient {
return NewWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewWithBaseURI creates an instance of the BaseClient client using a custom endpoint. Use this when interacting with
// an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack).
func NewWithBaseURI(baseURI string, subscriptionID string) BaseClient {
return BaseClient{
Client: autorest.NewClientWithUserAgent(UserAgent()),
BaseURI: baseURI,
SubscriptionID: subscriptionID,
}
}<|fim▁end|> | import (
"github.com/Azure/go-autorest/autorest"
)
|
<|file_name|>git.cc<|end_file_name|><|fim▁begin|>// This file is part of MLDB. Copyright 2015 mldb.ai inc. All rights reserved.
/** git.cc
Jeremy Barnes, 14 November 2015
Copyright (c) mldb.ai inc. All rights reserved.
*/
#include "mldb/core/procedure.h"
#include "mldb/core/dataset.h"
#include "mldb/base/per_thread_accumulator.h"
#include "mldb/types/url.h"
#include "mldb/types/structure_description.h"
#include "mldb/types/vector_description.h"
#include "mldb/types/any_impl.h"
#include "mldb/vfs/fs_utils.h"
#include "mldb/base/scope.h"
#include "mldb/utils/distribution.h"
#include "mldb/base/parallel.h"
#include <boost/algorithm/string.hpp>
#include "mldb/types/annotated_exception.h"
#include "mldb/utils/log.h"
#include "mldb/ext/libgit2/include/git2.h"
#include "mldb/ext/libgit2/include/git2/revwalk.h"
#include "mldb/ext/libgit2/include/git2/commit.h"
#include "mldb/ext/libgit2/include/git2/diff.h"
struct GitFileOperation {
GitFileOperation()
: insertions(0), deletions(0)
{
}
int insertions;
int deletions;
std::string op;
};
struct GitFileStats {
GitFileStats()
: insertions(0), deletions(0)
{
}
std::map<std::string, GitFileOperation> files;
int insertions;
int deletions;
};
int stats_by_file_each_file_cb(const git_diff_delta *delta,
float progress,
void *payload)
{
GitFileStats & stats = *((GitFileStats *)payload);
GitFileOperation op;
switch (delta->status) {
case GIT_DELTA_UNMODIFIED: /** no changes */
return 0;
case GIT_DELTA_ADDED: /** entry does not exist in old version */
op.op = "added";
break;
case GIT_DELTA_DELETED: /** entry does not exist in new version */
op.op = "deleted";
break;
case GIT_DELTA_MODIFIED: /** entry content changed between old and new */
op.op = "modified";
break;
case GIT_DELTA_RENAMED: /** entry was renamed between old and new */
op.op = "renamed";
break;
case GIT_DELTA_COPIED: /** entry was copied from another old entry */
op.op = "copied";
break;
case GIT_DELTA_IGNORED: /** entry is ignored item in workdir */
return 0;
case GIT_DELTA_UNTRACKED: /** entry is untracked item in workdir */
return 0;
case GIT_DELTA_TYPECHANGE: /** type of entry changed between old and new */
return 0;
default:
throw std::logic_error("git status");
}
if (delta->old_file.path)
stats.files[delta->old_file.path] = op;
return 0;
}
int stats_by_file_each_hunk_cb(const git_diff_delta *delta,
const git_diff_hunk * hunk,
void *payload)
{
GitFileStats & stats = *((GitFileStats *)payload);
if (delta->old_file.path)
stats.files[delta->old_file.path].deletions += hunk->old_lines;
if (delta->new_file.path)
stats.files[delta->new_file.path].insertions += hunk->new_lines;
stats.insertions += hunk->new_lines;
stats.deletions += hunk->old_lines;
return 0;
}
GitFileStats git_diff_by_file(git_diff *diff)
{
GitFileStats result;
int error = git_diff_foreach(diff,
stats_by_file_each_file_cb,
nullptr, /* binary callback */
stats_by_file_each_hunk_cb,
nullptr, /* line callback */
&result);
if (error < 0) {
throw MLDB::AnnotatedException(400, "Error traversing diff: "
+ std::string(giterr_last()->message));
}
return result;
}
using namespace std;
namespace MLDB {
/*****************************************************************************/
/* GIT IMPORTER */
/*****************************************************************************/
struct GitImporterConfig : ProcedureConfig {
static constexpr const char * name = "import.git";
GitImporterConfig()
: revisions({"HEAD"}), importStats(false), importTree(false),
ignoreUnknownEncodings(true)
{
outputDataset.withType("sparse.mutable");
}
Url repository;
PolyConfigT<Dataset> outputDataset;
std::vector<std::string> revisions;
bool importStats;
bool importTree;
bool ignoreUnknownEncodings;
// TODO
// when
// where
// limit
// offset
// select (instead of importStats, importTree)
};
DECLARE_STRUCTURE_DESCRIPTION(GitImporterConfig);
DEFINE_STRUCTURE_DESCRIPTION(GitImporterConfig);
GitImporterConfigDescription::
GitImporterConfigDescription()
{
addField("repository", &GitImporterConfig::repository,
"Git repository to load from. This is currently limited to "
"file:// urls which point to an already cloned repository on "
"local disk. Remote repositories will need to be checked out "
"beforehand using the git command line tools.");
addField("outputDataset", &GitImporterConfig::outputDataset,
"Output dataset for result. One row will be produced per commit. "
"See the documentation for the output format.",
PolyConfigT<Dataset>().withType("sparse.mutable"));
std::vector<std::string> defaultRevisions = { "HEAD" };
addField("revisions", &GitImporterConfig::revisions,
"Revisions to load from Git (eg, HEAD, HEAD~20..HEAD, tags/*). "
"See the gitrevisions (7) documentation. Default is all revisions "
"reachable from HEAD", defaultRevisions);
addField("importStats", &GitImporterConfig::importStats,
"If true, then import the stats (number of files "
"changed, lines added and lines deleted)", false);
addField("importTree", &GitImporterConfig::importTree,
"If true, then import the tree (names of files changed)", false);
addField("ignoreUnknownEncodings",
&GitImporterConfig::ignoreUnknownEncodings,
"If true (default), ignore commit messages with unknown encodings "
"(supported are ISO-8859-1 and UTF-8) and replace with a "
"placeholder. If false, messages with unknown encodings will "
"cause the commit to abort.");
addParent<ProcedureConfig>();
}
struct GitImporter: public Procedure {
GitImporter(MldbEngine * owner,
PolyConfig config_,
const std::function<bool (const Json::Value &)> & onProgress)
: Procedure(owner)
{
config = config_.params.convert<GitImporterConfig>();
}
GitImporterConfig config;
std::string encodeOid(const git_oid & oid) const
{
char shortsha[10] = {0};
git_oid_tostr(shortsha, 9, &oid);
return string(shortsha);
};
// Process an individual commit
std::vector<std::tuple<ColumnPath, CellValue, Date> >
processCommit(git_repository * repo, const git_oid & oid) const
{
string sha = encodeOid(oid);
auto checkError = [&] (int error, const char * msg)
{
if (error < 0)
throw AnnotatedException(500, string(msg) + ": "
+ giterr_last()->message,
"repository", config.repository,
"commit", string(sha));
};
git_commit *commit;
int error = git_commit_lookup(&commit, repo, &oid);
checkError(error, "Error getting commit");
Scope_Exit(git_commit_free(commit));
const char *encoding = git_commit_message_encoding(commit);
const char *messageStr = git_commit_message(commit);
git_time_t time = git_commit_time(commit);
int offset_in_min = git_commit_time_offset(commit);
const git_signature *committer = git_commit_committer(commit);
const git_signature *author = git_commit_author(commit);
//const git_oid *tree_id = git_commit_tree_id(commit);
git_diff *diff = nullptr;
Scope_Exit(git_diff_free(diff));
Utf8String message;
if (!encoding || strcmp(encoding, "UTF-8") == 0) {
message = Utf8String(messageStr);
}
else if (strcmp(encoding,"ISO-8859-1") == 0) {
message = Utf8String::fromLatin1(messageStr);
}
else if (config.ignoreUnknownEncodings) {
message = "<<<couldn't decode message in "
+ string(encoding) + " character set>>>";
}
else {
throw AnnotatedException(500,
"Can't decode unknown commit message encoding",
"repository", config.repository,
"commit", string(sha),
"encoding", encoding);
}
vector<string> parents;
unsigned int parentCount = git_commit_parentcount(commit);
for (unsigned i = 0; i < parentCount; ++i) {
const git_oid *nth_parent_id = git_commit_parent_id(commit, i);
git_commit *nth_parent = nullptr;
int error = git_commit_parent(&nth_parent, commit, i);
checkError(error, "Error getting commit parent");
Scope_Exit(git_commit_free(nth_parent));
parents.emplace_back(encodeOid(*nth_parent_id));
if (i == 0 && parentCount == 1
&& (config.importStats || config.importTree)) {
const git_oid * parent_tree_id = git_commit_tree_id(nth_parent);
if (parent_tree_id) {
git_tree * tree = nullptr;
git_tree * parentTree = nullptr;
error = git_commit_tree(&tree, commit);
checkError(error, "Error getting commit tree");
Scope_Exit(git_tree_free(tree));
error = git_commit_tree(&parentTree, nth_parent);
checkError(error, "Error getting parent tree");
Scope_Exit(git_tree_free(parentTree));
error = git_diff_tree_to_tree(&diff, repo, tree, parentTree, NULL);
checkError(error, "Error diffing commits");
git_diff_find_options opts = GIT_DIFF_FIND_OPTIONS_INIT;
opts.flags = GIT_DIFF_FIND_RENAMES |
GIT_DIFF_FIND_COPIES |
GIT_DIFF_FIND_FOR_UNTRACKED;
error = git_diff_find_similar(diff, &opts);
checkError(error, "Error detecting renames");
}
}
}
Date timestamp = Date::fromSecondsSinceEpoch(time + 60 * offset_in_min);
Utf8String committerName(committer->name);
Utf8String committerEmail(committer->email);
Utf8String authorName(author->name);
Utf8String authorEmail(author->email);
std::vector<std::tuple<ColumnPath, CellValue, Date> > row;
row.emplace_back(ColumnPath("committer"), committerName, timestamp);
row.emplace_back(ColumnPath("committerEmail"), committerEmail, timestamp);
row.emplace_back(ColumnPath("author"), authorName, timestamp);
row.emplace_back(ColumnPath("authorEmail"), authorEmail, timestamp);
row.emplace_back(ColumnPath("message"), message, timestamp);
row.emplace_back(ColumnPath("parentCount"), parentCount, timestamp);
for (auto & p: parents)
row.emplace_back(ColumnPath("parent"), p, timestamp);
int filesChanged = 0;
int insertions = 0;
int deletions = 0;
if (diff) {
GitFileStats stats = git_diff_by_file(diff);
filesChanged = stats.files.size();
insertions = stats.insertions;
deletions = stats.deletions;
row.emplace_back(ColumnPath("insertions"), insertions, timestamp);
row.emplace_back(ColumnPath("deletions"), deletions, timestamp);
row.emplace_back(ColumnPath("filesChanged"), filesChanged, timestamp);
for (auto & f: stats.files) {
if (!config.importTree) break;
Utf8String filename(f.first);
row.emplace_back(ColumnPath("file"), filename, timestamp);
if (f.second.insertions > 0)
row.emplace_back(ColumnPath("file." + filename + ".insertions"),
f.second.insertions, timestamp);
if (f.second.deletions > 0)
row.emplace_back(ColumnPath("file." + filename + ".deletions"),
f.second.deletions, timestamp);
if (!f.second.op.empty())
row.emplace_back(ColumnPath("file." + filename + ".op"),
f.second.op, timestamp);
}
}
DEBUG_MSG(logger)
<< "id " << sha << " had " << filesChanged << " changes, "
<< insertions << " insertions and " << deletions << " deletions "
<< message << " parents " << parentCount;
return row;
}
virtual RunOutput run(const ProcedureRunConfig & run,
const std::function<bool (const Json::Value &)> & onProgress) const
{
auto runProcConf = applyRunConfOverProcConf(config, run);
auto checkError = [&] (int error, const char * msg)
{
if (error < 0)
throw AnnotatedException(500, string(msg) + ": "
+ giterr_last()->message,
"repository", runProcConf.repository);
};
git_libgit2_init();
Scope_Exit(git_libgit2_shutdown());
git_repository * repo;
Utf8String repoName(runProcConf.repository.toString());
repoName.removePrefix("file://");
int error = git_repository_open(&repo, repoName.rawData());
checkError(error, "Error opening git repository");
Scope_Exit(git_repository_free(repo));
// Create the output dataset
std::shared_ptr<Dataset> output;
if (!runProcConf.outputDataset.type.empty()
|| !runProcConf.outputDataset.id.empty()) {
output = createDataset(engine, runProcConf.outputDataset,
nullptr, true /*overwrite*/);
}
git_revwalk *walker;
error = git_revwalk_new(&walker, repo);
checkError(error, "Error creating commit walker");
Scope_Exit(git_revwalk_free(walker));
for (auto & r: runProcConf.revisions) {
if (r.find("*") != string::npos)
error = git_revwalk_push_glob(walker, r.c_str());
else if (r.find("..") != string::npos)
error = git_revwalk_push_range(walker, r.c_str());
else error = git_revwalk_push_ref(walker, r.c_str());
if (error < 0)
throw AnnotatedException(500, "Error adding revision: "
+ string(giterr_last()->message),
"repository", runProcConf.repository,
"revision", r);
}
vector<git_oid> oids;
git_oid oid;
while (!git_revwalk_next(&oid, walker)) {
oids.push_back(oid);
}
struct Accum {
Accum(const Utf8String & filename)
{
rows.reserve(1000);
int error = git_repository_open(&repo, filename.rawData());
if (error < 0)
throw AnnotatedException(400, "Error opening Git repo: "
+ string(giterr_last()->message));
}<|fim▁hole|> }
std::vector<std::pair<RowPath, std::vector<std::tuple<ColumnPath, CellValue, Date> > > > rows;
git_repository * repo;
};
PerThreadAccumulator<Accum> accum([&] () { return new Accum(repoName); });
INFO_MSG(logger) << "processing " << oids.size() << " commits";
auto doProcessCommit = [&] (int i)
{
if (i && i % 100 == 0)
INFO_MSG(logger)
<< "imported commit " << i << " of " << oids.size();
Accum & threadAccum = accum.get();
auto row = processCommit(repo, oids[i]);
threadAccum.rows.emplace_back(RowPath(encodeOid(oids[i])),
std::move(row));
if (threadAccum.rows.size() == 1000) {
output->recordRows(threadAccum.rows);
threadAccum.rows.clear();
}
};
parallelMap(0, oids.size(), doProcessCommit);
for (auto & t: accum.threads) {
output->recordRows(t->rows);
}
output->commit();
RunOutput result;
return result;
}
virtual Any getStatus() const
{
return Any();
}
GitImporterConfig procConfig;
};
RegisterProcedureType<GitImporter, GitImporterConfig>
regGit(builtinPackage(),
"Import a Git repository's metadata into MLDB",
"procedures/GitImporter.md.html");
} // namespace MLDB<|fim▁end|> |
~Accum()
{
git_repository_free(repo); |
<|file_name|>JavacTreeMaker.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2013-2018 The Project Lombok Authors.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package lombok.javac;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;<|fim▁hole|>import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.tree.JCTree;
import com.sun.tools.javac.tree.JCTree.JCAnnotation;
import com.sun.tools.javac.tree.JCTree.JCArrayAccess;
import com.sun.tools.javac.tree.JCTree.JCArrayTypeTree;
import com.sun.tools.javac.tree.JCTree.JCAssert;
import com.sun.tools.javac.tree.JCTree.JCAssign;
import com.sun.tools.javac.tree.JCTree.JCAssignOp;
import com.sun.tools.javac.tree.JCTree.JCBinary;
import com.sun.tools.javac.tree.JCTree.JCBlock;
import com.sun.tools.javac.tree.JCTree.JCBreak;
import com.sun.tools.javac.tree.JCTree.JCCase;
import com.sun.tools.javac.tree.JCTree.JCCatch;
import com.sun.tools.javac.tree.JCTree.JCClassDecl;
import com.sun.tools.javac.tree.JCTree.JCCompilationUnit;
import com.sun.tools.javac.tree.JCTree.JCConditional;
import com.sun.tools.javac.tree.JCTree.JCContinue;
import com.sun.tools.javac.tree.JCTree.JCDoWhileLoop;
import com.sun.tools.javac.tree.JCTree.JCEnhancedForLoop;
import com.sun.tools.javac.tree.JCTree.JCErroneous;
import com.sun.tools.javac.tree.JCTree.JCExpression;
import com.sun.tools.javac.tree.JCTree.JCExpressionStatement;
import com.sun.tools.javac.tree.JCTree.JCFieldAccess;
import com.sun.tools.javac.tree.JCTree.JCForLoop;
import com.sun.tools.javac.tree.JCTree.JCIdent;
import com.sun.tools.javac.tree.JCTree.JCIf;
import com.sun.tools.javac.tree.JCTree.JCImport;
import com.sun.tools.javac.tree.JCTree.JCInstanceOf;
import com.sun.tools.javac.tree.JCTree.JCLabeledStatement;
import com.sun.tools.javac.tree.JCTree.JCLiteral;
import com.sun.tools.javac.tree.JCTree.JCMethodDecl;
import com.sun.tools.javac.tree.JCTree.JCMethodInvocation;
import com.sun.tools.javac.tree.JCTree.JCModifiers;
import com.sun.tools.javac.tree.JCTree.JCNewArray;
import com.sun.tools.javac.tree.JCTree.JCNewClass;
import com.sun.tools.javac.tree.JCTree.JCParens;
import com.sun.tools.javac.tree.JCTree.JCPrimitiveTypeTree;
import com.sun.tools.javac.tree.JCTree.JCReturn;
import com.sun.tools.javac.tree.JCTree.JCSkip;
import com.sun.tools.javac.tree.JCTree.JCStatement;
import com.sun.tools.javac.tree.JCTree.JCSwitch;
import com.sun.tools.javac.tree.JCTree.JCSynchronized;
import com.sun.tools.javac.tree.JCTree.JCThrow;
import com.sun.tools.javac.tree.JCTree.JCTry;
import com.sun.tools.javac.tree.JCTree.JCTypeApply;
import com.sun.tools.javac.tree.JCTree.JCTypeCast;
import com.sun.tools.javac.tree.JCTree.JCTypeParameter;
import com.sun.tools.javac.tree.JCTree.JCUnary;
import com.sun.tools.javac.tree.JCTree.JCVariableDecl;
import com.sun.tools.javac.tree.JCTree.JCWhileLoop;
import com.sun.tools.javac.tree.JCTree.JCWildcard;
import com.sun.tools.javac.tree.JCTree.LetExpr;
import com.sun.tools.javac.tree.JCTree.TypeBoundKind;
import com.sun.tools.javac.tree.TreeInfo;
import com.sun.tools.javac.tree.TreeMaker;
import com.sun.tools.javac.util.List;
import com.sun.tools.javac.util.Name;
import lombok.permit.Permit;
public class JavacTreeMaker {
private final TreeMaker tm;
public JavacTreeMaker(TreeMaker tm) {
this.tm = tm;
}
public TreeMaker getUnderlyingTreeMaker() {
return tm;
}
public JavacTreeMaker at(int pos) {
tm.at(pos);
return this;
}
private static class MethodId<J> {
private final Class<?> owner;
private final String name;
private final Class<J> returnType;
private final Class<?>[] paramTypes;
MethodId(Class<?> owner, String name, Class<J> returnType, Class<?>... types) {
this.owner = owner;
this.name = name;
this.paramTypes = types;
this.returnType = returnType;
}
@Override public String toString() {
StringBuilder out = new StringBuilder();
out.append(returnType.getName()).append(" ").append(owner.getName()).append(".").append(name).append("(");
boolean f = true;
for (Class<?> p : paramTypes) {
if (f) f = false;
else out.append(", ");
out.append(p.getName());
}
return out.append(")").toString();
}
}
private static class SchroedingerType {
final Object value;
private SchroedingerType(Object value) {
this.value = value;
}
@Override public int hashCode() {
return value == null ? -1 : value.hashCode();
}
@Override public boolean equals(Object obj) {
if (obj instanceof SchroedingerType) {
Object other = ((SchroedingerType) obj).value;
return value == null ? other == null : value.equals(other);
}
return false;
}
static Object getFieldCached(ConcurrentMap<String, Object> cache, String className, String fieldName) {
Object value = cache.get(fieldName);
if (value != null) return value;
try {
value = Permit.getField(Class.forName(className), fieldName).get(null);
} catch (NoSuchFieldException e) {
throw Javac.sneakyThrow(e);
} catch (IllegalAccessException e) {
throw Javac.sneakyThrow(e);
} catch (ClassNotFoundException e) {
throw Javac.sneakyThrow(e);
}
cache.putIfAbsent(fieldName, value);
return value;
}
private static Field NOSUCHFIELDEX_MARKER;
static {
try {
NOSUCHFIELDEX_MARKER = Permit.getField(SchroedingerType.class, "NOSUCHFIELDEX_MARKER");
} catch (NoSuchFieldException e) {
throw Javac.sneakyThrow(e);
}
}
static Object getFieldCached(ConcurrentMap<Class<?>, Field> cache, Object ref, String fieldName) throws NoSuchFieldException {
Class<?> c = ref.getClass();
Field field = cache.get(c);
if (field == null) {
try {
field = Permit.getField(c, fieldName);
} catch (NoSuchFieldException e) {
cache.putIfAbsent(c, NOSUCHFIELDEX_MARKER);
throw Javac.sneakyThrow(e);
}
Permit.setAccessible(field);
Field old = cache.putIfAbsent(c, field);
if (old != null) field = old;
}
if (field == NOSUCHFIELDEX_MARKER) throw new NoSuchFieldException(fieldName);
try {
return field.get(ref);
} catch (IllegalAccessException e) {
throw Javac.sneakyThrow(e);
}
}
}
public static class TypeTag extends SchroedingerType {
private static final ConcurrentMap<String, Object> TYPE_TAG_CACHE = new ConcurrentHashMap<String, Object>();
private static final ConcurrentMap<Class<?>, Field> FIELD_CACHE = new ConcurrentHashMap<Class<?>, Field>();
private static final Method TYPE_TYPETAG_METHOD;
static {
Method m = null;
try {
m = Permit.getMethod(Type.class, "getTag");
} catch (NoSuchMethodException e) {}
TYPE_TYPETAG_METHOD = m;
}
private TypeTag(Object value) {
super(value);
}
public static TypeTag typeTag(JCTree o) {
try {
return new TypeTag(getFieldCached(FIELD_CACHE, o, "typetag"));
} catch (NoSuchFieldException e) {
throw Javac.sneakyThrow(e);
}
}
public static TypeTag typeTag(Type t) {
if (t == null) return Javac.CTC_VOID;
try {
return new TypeTag(getFieldCached(FIELD_CACHE, t, "tag"));
} catch (NoSuchFieldException e) {
if (TYPE_TYPETAG_METHOD == null) throw new IllegalStateException("Type " + t.getClass() + " has neither 'tag' nor getTag()");
try {
return new TypeTag(TYPE_TYPETAG_METHOD.invoke(t));
} catch (IllegalAccessException ex) {
throw Javac.sneakyThrow(ex);
} catch (InvocationTargetException ex) {
throw Javac.sneakyThrow(ex.getCause());
}
}
}
public static TypeTag typeTag(String identifier) {
return new TypeTag(getFieldCached(TYPE_TAG_CACHE, Javac.getJavaCompilerVersion() < 8 ? "com.sun.tools.javac.code.TypeTags" : "com.sun.tools.javac.code.TypeTag", identifier));
}
}
public static class TreeTag extends SchroedingerType {
private static final ConcurrentMap<String, Object> TREE_TAG_CACHE = new ConcurrentHashMap<String, Object>();
private static final Field TAG_FIELD;
private static final Method TAG_METHOD;
private static final MethodId<Integer> OP_PREC = MethodId(TreeInfo.class, "opPrec", int.class, TreeTag.class);
static {
Method m = null;
try {
m = Permit.getMethod(JCTree.class, "getTag");
} catch (NoSuchMethodException e) {}
if (m != null) {
TAG_FIELD = null;
TAG_METHOD = m;
} else {
Field f = null;
try {
f = Permit.getField(JCTree.class, "tag");
} catch (NoSuchFieldException e) {}
TAG_FIELD = f;
TAG_METHOD = null;
}
}
private TreeTag(Object value) {
super(value);
}
public static TreeTag treeTag(JCTree o) {
try {
if (TAG_METHOD != null) return new TreeTag(TAG_METHOD.invoke(o));
else return new TreeTag(TAG_FIELD.get(o));
} catch (InvocationTargetException e) {
throw Javac.sneakyThrow(e.getCause());
} catch (IllegalAccessException e) {
throw Javac.sneakyThrow(e);
}
}
public static TreeTag treeTag(String identifier) {
return new TreeTag(getFieldCached(TREE_TAG_CACHE, Javac.getJavaCompilerVersion() < 8 ? "com.sun.tools.javac.tree.JCTree" : "com.sun.tools.javac.tree.JCTree$Tag", identifier));
}
public int getOperatorPrecedenceLevel() {
return invokeAny(null, OP_PREC, value);
}
public boolean isPrefixUnaryOp() {
return Javac.CTC_NEG.equals(this) || Javac.CTC_POS.equals(this) || Javac.CTC_NOT.equals(this) || Javac.CTC_COMPL.equals(this) || Javac.CTC_PREDEC.equals(this) || Javac.CTC_PREINC.equals(this);
}
}
static <J> MethodId<J> MethodId(Class<?> owner, String name, Class<J> returnType, Class<?>... types) {
return new MethodId<J>(owner, name, returnType, types);
}
/**
* Creates a new method ID based on the name of the method to invoke, the return type of that method, and the types of the parameters.
*
* A method matches if the return type matches, and for each parameter the following holds:
*
* Either (A) the type listed here is the same as, or a subtype of, the type of the method in javac's TreeMaker, or
* (B) the type listed here is a subtype of SchroedingerType.
*/
static <J> MethodId<J> MethodId(String name, Class<J> returnType, Class<?>... types) {
return new MethodId<J>(TreeMaker.class, name, returnType, types);
}
/**
* Creates a new method ID based on the name of a method in this class, assuming the name of the method to invoke in TreeMaker has the same name,
* the same return type, and the same parameters (under the same rules as the other MethodId method).
*/
static <J> MethodId<J> MethodId(String name) {
for (Method m : JavacTreeMaker.class.getDeclaredMethods()) {
if (m.getName().equals(name)) {
@SuppressWarnings("unchecked") Class<J> r = (Class<J>) m.getReturnType();
Class<?>[] p = m.getParameterTypes();
return new MethodId<J>(TreeMaker.class, name, r, p);
}
}
throw new InternalError("Not found: " + name);
}
private static final Object METHOD_NOT_FOUND = new Object[0];
private static final Object METHOD_MULTIPLE_FOUND = new Object[0];
private static final ConcurrentHashMap<MethodId<?>, Object> METHOD_CACHE = new ConcurrentHashMap<MethodId<?>, Object>();
private <J> J invoke(MethodId<J> m, Object... args) {
return invokeAny(tm, m, args);
}
@SuppressWarnings("unchecked") private static <J> J invokeAny(Object owner, MethodId<J> m, Object... args) {
Method method = getFromCache(m);
try {
if (m.returnType.isPrimitive()) {
Object res = method.invoke(owner, args);
String sn = res.getClass().getSimpleName().toLowerCase();
if (!sn.startsWith(m.returnType.getSimpleName())) throw new ClassCastException(res.getClass() + " to " + m.returnType);
return (J) res;
}
return m.returnType.cast(method.invoke(owner, args));
} catch (InvocationTargetException e) {
throw Javac.sneakyThrow(e.getCause());
} catch (IllegalAccessException e) {
throw Javac.sneakyThrow(e);
} catch (IllegalArgumentException e) {
System.err.println(method);
throw Javac.sneakyThrow(e);
}
}
private static boolean tryResolve(MethodId<?> m) {
Object s = METHOD_CACHE.get(m);
if (s == null) s = addToCache(m);
if (s instanceof Method) return true;
return false;
}
private static Method getFromCache(MethodId<?> m) {
Object s = METHOD_CACHE.get(m);
if (s == null) s = addToCache(m);
if (s == METHOD_MULTIPLE_FOUND) throw new IllegalStateException("Lombok TreeMaker frontend issue: multiple matches when looking for method: " + m);
if (s == METHOD_NOT_FOUND) throw new IllegalStateException("Lombok TreeMaker frontend issue: no match when looking for method: " + m);
return (Method) s;
}
private static Object addToCache(MethodId<?> m) {
Method found = null;
outer:
for (Method method : m.owner.getDeclaredMethods()) {
if (!m.name.equals(method.getName())) continue;
Class<?>[] t = method.getParameterTypes();
if (t.length != m.paramTypes.length) continue;
for (int i = 0; i < t.length; i++) {
if (Symbol.class.isAssignableFrom(t[i])) continue outer;
if (!SchroedingerType.class.isAssignableFrom(m.paramTypes[i])) {
if (t[i].isPrimitive()) {
if (t[i] != m.paramTypes[i]) continue outer;
} else {
if (!t[i].isAssignableFrom(m.paramTypes[i])) continue outer;
}
}
}
if (found == null) found = method;
else {
METHOD_CACHE.putIfAbsent(m, METHOD_MULTIPLE_FOUND);
return METHOD_MULTIPLE_FOUND;
}
}
if (found == null) {
METHOD_CACHE.putIfAbsent(m, METHOD_NOT_FOUND);
return METHOD_NOT_FOUND;
}
Permit.setAccessible(found);
Object marker = METHOD_CACHE.putIfAbsent(m, found);
if (marker == null) return found;
return marker;
}
//javac versions: 6-8
private static final MethodId<JCCompilationUnit> TopLevel = MethodId("TopLevel");
public JCCompilationUnit TopLevel(List<JCAnnotation> packageAnnotations, JCExpression pid, List<JCTree> defs) {
return invoke(TopLevel, packageAnnotations, pid, defs);
}
//javac versions: 6-8
private static final MethodId<JCImport> Import = MethodId("Import");
public JCImport Import(JCTree qualid, boolean staticImport) {
return invoke(Import, qualid, staticImport);
}
//javac versions: 6-8
private static final MethodId<JCClassDecl> ClassDef = MethodId("ClassDef");
public JCClassDecl ClassDef(JCModifiers mods, Name name, List<JCTypeParameter> typarams, JCExpression extending, List<JCExpression> implementing, List<JCTree> defs) {
return invoke(ClassDef, mods, name, typarams, extending, implementing, defs);
}
//javac versions: 6-8
private static final MethodId<JCMethodDecl> MethodDef = MethodId("MethodDef", JCMethodDecl.class, JCModifiers.class, Name.class, JCExpression.class, List.class, List.class, List.class, JCBlock.class, JCExpression.class);
public JCMethodDecl MethodDef(JCModifiers mods, Name name, JCExpression resType, List<JCTypeParameter> typarams, List<JCVariableDecl> params, List<JCExpression> thrown, JCBlock body, JCExpression defaultValue) {
return invoke(MethodDef, mods, name, resType, typarams, params, thrown, body, defaultValue);
}
//javac versions: 8
private static final MethodId<JCMethodDecl> MethodDefWithRecvParam = MethodId("MethodDef", JCMethodDecl.class, JCModifiers.class, Name.class, JCExpression.class, List.class, JCVariableDecl.class, List.class, List.class, JCBlock.class, JCExpression.class);
public JCMethodDecl MethodDef(JCModifiers mods, Name name, JCExpression resType, List<JCTypeParameter> typarams, JCVariableDecl recvparam, List<JCVariableDecl> params, List<JCExpression> thrown, JCBlock body, JCExpression defaultValue) {
return invoke(MethodDefWithRecvParam, mods, name, resType, recvparam, typarams, params, thrown, body, defaultValue);
}
//javac versions: 6-8
private static final MethodId<JCVariableDecl> VarDef = MethodId("VarDef");
public JCVariableDecl VarDef(JCModifiers mods, Name name, JCExpression vartype, JCExpression init) {
JCVariableDecl varDef = invoke(VarDef, mods, name, vartype, init);
// We use 'position of the type is -1' as indicator in delombok that the original node was written using JDK10's 'var' feature, because javac desugars 'var' to the real type and doesn't leave any markers other than the
// node position to indicate that it did so. Unfortunately, that means vardecls we generate look like 'var' to delombok. Adjust the position to avoid this.
if (varDef.vartype != null && varDef.vartype.pos == -1) varDef.vartype.pos = 0;
return varDef;
}
//javac versions: 8
private static final MethodId<JCVariableDecl> ReceiverVarDef = MethodId("ReceiverVarDef");
public JCVariableDecl ReceiverVarDef(JCModifiers mods, JCExpression name, JCExpression vartype) {
return invoke(ReceiverVarDef, mods, name, vartype);
}
//javac versions: 6-8
private static final MethodId<JCSkip> Skip = MethodId("Skip");
public JCSkip Skip() {
return invoke(Skip);
}
//javac versions: 6-8
private static final MethodId<JCBlock> Block = MethodId("Block");
public JCBlock Block(long flags, List<JCStatement> stats) {
return invoke(Block, flags, stats);
}
//javac versions: 6-8
private static final MethodId<JCDoWhileLoop> DoLoop = MethodId("DoLoop");
public JCDoWhileLoop DoLoop(JCStatement body, JCExpression cond) {
return invoke(DoLoop, body, cond);
}
//javac versions: 6-8
private static final MethodId<JCWhileLoop> WhileLoop = MethodId("WhileLoop");
public JCWhileLoop WhileLoop(JCExpression cond, JCStatement body) {
return invoke(WhileLoop, cond, body);
}
//javac versions: 6-8
private static final MethodId<JCForLoop> ForLoop = MethodId("ForLoop");
public JCForLoop ForLoop(List<JCStatement> init, JCExpression cond, List<JCExpressionStatement> step, JCStatement body) {
return invoke(ForLoop, init, cond, step, body);
}
//javac versions: 6-8
private static final MethodId<JCEnhancedForLoop> ForeachLoop = MethodId("ForeachLoop");
public JCEnhancedForLoop ForeachLoop(JCVariableDecl var, JCExpression expr, JCStatement body) {
return invoke(ForeachLoop, var, expr, body);
}
//javac versions: 6-8
private static final MethodId<JCLabeledStatement> Labelled = MethodId("Labelled");
public JCLabeledStatement Labelled(Name label, JCStatement body) {
return invoke(Labelled, label, body);
}
//javac versions: 6-8
private static final MethodId<JCSwitch> Switch = MethodId("Switch");
public JCSwitch Switch(JCExpression selector, List<JCCase> cases) {
return invoke(Switch, selector, cases);
}
//javac versions: 6-11
private static final MethodId<JCCase> Case11 = MethodId("Case", JCCase.class, JCExpression.class, com.sun.tools.javac.util.List.class);
//javac version: 12+
public static class Case12 {
private static final Class<?> CASE_KIND_CLASS = classForName(TreeMaker.class, "com.sun.source.tree.CaseTree$CaseKind");
static final MethodId<JCCase> Case12 = MethodId("Case", JCCase.class, CASE_KIND_CLASS, com.sun.tools.javac.util.List.class, com.sun.tools.javac.util.List.class, JCTree.class);
static final Object CASE_KIND_STATEMENT = CASE_KIND_CLASS.getEnumConstants()[0];
}
static Class<?> classForName(Class<?> context, String name) {
try {
return context.getClassLoader().loadClass(name);
} catch (ClassNotFoundException e) {
Error x = new NoClassDefFoundError(e.getMessage());
x.setStackTrace(e.getStackTrace());
throw x;
}
}
public JCCase Case(JCExpression pat, List<JCStatement> stats) {
if (tryResolve(Case11)) return invoke(Case11, pat, stats);
return invoke(Case12.Case12, Case12.CASE_KIND_STATEMENT, pat == null ? com.sun.tools.javac.util.List.nil() : com.sun.tools.javac.util.List.of(pat), stats, null);
}
//javac versions: 6-8
private static final MethodId<JCSynchronized> Synchronized = MethodId("Synchronized");
public JCSynchronized Synchronized(JCExpression lock, JCBlock body) {
return invoke(Synchronized, lock, body);
}
//javac versions: 6-8
private static final MethodId<JCTry> Try = MethodId("Try", JCTry.class, JCBlock.class, List.class, JCBlock.class);
public JCTry Try(JCBlock body, List<JCCatch> catchers, JCBlock finalizer) {
return invoke(Try, body, catchers, finalizer);
}
//javac versions: 7-8
private static final MethodId<JCTry> TryWithResources = MethodId("Try", JCTry.class, List.class, JCBlock.class, List.class, JCBlock.class);
public JCTry Try(List<JCTree> resources, JCBlock body, List<JCCatch> catchers, JCBlock finalizer) {
return invoke(TryWithResources, resources, body, catchers, finalizer);
}
//javac versions: 6-8
private static final MethodId<JCCatch> Catch = MethodId("Catch");
public JCCatch Catch(JCVariableDecl param, JCBlock body) {
return invoke(Catch, param, body);
}
//javac versions: 6-8
private static final MethodId<JCConditional> Conditional = MethodId("Conditional");
public JCConditional Conditional(JCExpression cond, JCExpression thenpart, JCExpression elsepart) {
return invoke(Conditional, cond, thenpart, elsepart);
}
//javac versions: 6-8
private static final MethodId<JCIf> If = MethodId("If");
public JCIf If(JCExpression cond, JCStatement thenpart, JCStatement elsepart) {
return invoke(If, cond, thenpart, elsepart);
}
//javac versions: 6-8
private static final MethodId<JCExpressionStatement> Exec = MethodId("Exec");
public JCExpressionStatement Exec(JCExpression expr) {
return invoke(Exec, expr);
}
//javac version: 6-11
private static final MethodId<JCBreak> Break11 = MethodId("Break", JCBreak.class, Name.class);
//javac version: 12+
private static final MethodId<JCBreak> Break12 = MethodId("Break", JCBreak.class, JCExpression.class);
public JCBreak Break(Name label) {
if (tryResolve(Break11)) return invoke(Break11, label);
return invoke(Break12, label != null ? Ident(label) : null);
}
//javac versions: 6-8
private static final MethodId<JCContinue> Continue = MethodId("Continue");
public JCContinue Continue(Name label) {
return invoke(Continue, label);
}
//javac versions: 6-8
private static final MethodId<JCReturn> Return = MethodId("Return");
public JCReturn Return(JCExpression expr) {
return invoke(Return, expr);
}
//javac versions: 6-8
private static final MethodId<JCThrow> Throw = MethodId("Throw");
public JCThrow Throw(JCExpression expr) {
return invoke(Throw, expr);
}
//javac versions: 6-8
private static final MethodId<JCAssert> Assert = MethodId("Assert");
public JCAssert Assert(JCExpression cond, JCExpression detail) {
return invoke(Assert, cond, detail);
}
//javac versions: 6-8
private static final MethodId<JCMethodInvocation> Apply = MethodId("Apply");
public JCMethodInvocation Apply(List<JCExpression> typeargs, JCExpression fn, List<JCExpression> args) {
return invoke(Apply, typeargs, fn, args);
}
//javac versions: 6-8
private static final MethodId<JCNewClass> NewClass = MethodId("NewClass");
public JCNewClass NewClass(JCExpression encl, List<JCExpression> typeargs, JCExpression clazz, List<JCExpression> args, JCClassDecl def) {
return invoke(NewClass, encl, typeargs, clazz, args, def);
}
//javac versions: 6-8
private static final MethodId<JCNewArray> NewArray = MethodId("NewArray");
public JCNewArray NewArray(JCExpression elemtype, List<JCExpression> dims, List<JCExpression> elems) {
return invoke(NewArray, elemtype, dims, elems);
}
//javac versions: 8
// private static final MethodId<JCLambda> Lambda = MethodId("Lambda");
// public JCLambda Lambda(List<JCVariableDecl> params, JCTree body) {
// return invoke(Lambda, params, body);
// }
//javac versions: 6-8
private static final MethodId<JCParens> Parens = MethodId("Parens");
public JCParens Parens(JCExpression expr) {
return invoke(Parens, expr);
}
//javac versions: 6-8
private static final MethodId<JCAssign> Assign = MethodId("Assign");
public JCAssign Assign(JCExpression lhs, JCExpression rhs) {
return invoke(Assign, lhs, rhs);
}
//javac versions: 6-8
//opcode = [6-7] int [8] JCTree.Tag
private static final MethodId<JCAssignOp> Assignop = MethodId("Assignop");
public JCAssignOp Assignop(TreeTag opcode, JCTree lhs, JCTree rhs) {
return invoke(Assignop, opcode.value, lhs, rhs);
}
//javac versions: 6-8
//opcode = [6-7] int [8] JCTree.Tag
private static final MethodId<JCUnary> Unary = MethodId("Unary");
public JCUnary Unary(TreeTag opcode, JCExpression arg) {
return invoke(Unary, opcode.value, arg);
}
//javac versions: 6-8
//opcode = [6-7] int [8] JCTree.Tag
private static final MethodId<JCBinary> Binary = MethodId("Binary");
public JCBinary Binary(TreeTag opcode, JCExpression lhs, JCExpression rhs) {
return invoke(Binary, opcode.value, lhs, rhs);
}
//javac versions: 6-8
private static final MethodId<JCTypeCast> TypeCast = MethodId("TypeCast");
public JCTypeCast TypeCast(JCTree expr, JCExpression type) {
return invoke(TypeCast, expr, type);
}
//javac versions: 6-8
private static final MethodId<JCInstanceOf> TypeTest = MethodId("TypeTest");
public JCInstanceOf TypeTest(JCExpression expr, JCTree clazz) {
return invoke(TypeTest, expr, clazz);
}
//javac versions: 6-8
private static final MethodId<JCArrayAccess> Indexed = MethodId("Indexed");
public JCArrayAccess Indexed(JCExpression indexed, JCExpression index) {
return invoke(Indexed, indexed, index);
}
//javac versions: 6-8
private static final MethodId<JCFieldAccess> Select = MethodId("Select");
public JCFieldAccess Select(JCExpression selected, Name selector) {
return invoke(Select, selected, selector);
}
//javac versions: 8
// private static final MethodId<JCMemberReference> Reference = MethodId("Reference");
// public JCMemberReference Reference(JCMemberReference.ReferenceMode mode, Name name, JCExpression expr, List<JCExpression> typeargs) {
// return invoke(Reference, mode, name, expr, typeargs);
// }
//javac versions: 6-8
private static final MethodId<JCIdent> Ident = MethodId("Ident", JCIdent.class, Name.class);
public JCIdent Ident(Name idname) {
return invoke(Ident, idname);
}
//javac versions: 6-8
//tag = [6-7] int [8] TypeTag
private static final MethodId<JCLiteral> Literal = MethodId("Literal", JCLiteral.class, TypeTag.class, Object.class);
public JCLiteral Literal(TypeTag tag, Object value) {
return invoke(Literal, tag.value, value);
}
//javac versions: 6-8
//typetag = [6-7] int [8] TypeTag
private static final MethodId<JCPrimitiveTypeTree> TypeIdent = MethodId("TypeIdent");
public JCPrimitiveTypeTree TypeIdent(TypeTag typetag) {
return invoke(TypeIdent, typetag.value);
}
//javac versions: 6-8
private static final MethodId<JCArrayTypeTree> TypeArray = MethodId("TypeArray");
public JCArrayTypeTree TypeArray(JCExpression elemtype) {
return invoke(TypeArray, elemtype);
}
//javac versions: 6-8
private static final MethodId<JCTypeApply> TypeApply = MethodId("TypeApply");
public JCTypeApply TypeApply(JCExpression clazz, List<JCExpression> arguments) {
return invoke(TypeApply, clazz, arguments);
}
//javac versions: 7-8
// private static final MethodId<JCTypeUnion> TypeUnion = MethodId("TypeUnion");
// public JCTypeUnion TypeUnion(List<JCExpression> components) {
// return invoke(TypeUnion, compoonents);
// }
//javac versions: 8
// private static final MethodId<JCTypeIntersection> TypeIntersection = MethodId("TypeIntersection");
// public JCTypeIntersection TypeIntersection(List<JCExpression> components) {
// return invoke(TypeIntersection, components);
// }
//javac versions: 6-8
private static final MethodId<JCTypeParameter> TypeParameter = MethodId("TypeParameter", JCTypeParameter.class, Name.class, List.class);
public JCTypeParameter TypeParameter(Name name, List<JCExpression> bounds) {
return invoke(TypeParameter, name, bounds);
}
//javac versions: 8
private static final MethodId<JCTypeParameter> TypeParameterWithAnnos = MethodId("TypeParameter", JCTypeParameter.class, Name.class, List.class, List.class);
public JCTypeParameter TypeParameter(Name name, List<JCExpression> bounds, List<JCAnnotation> annos) {
return invoke(TypeParameterWithAnnos, name, bounds, annos);
}
//javac versions: 6-8
private static final MethodId<JCWildcard> Wildcard = MethodId("Wildcard");
public JCWildcard Wildcard(TypeBoundKind kind, JCTree type) {
return invoke(Wildcard, kind, type);
}
//javac versions: 6-8
private static final MethodId<TypeBoundKind> TypeBoundKind = MethodId("TypeBoundKind");
public TypeBoundKind TypeBoundKind(BoundKind kind) {
return invoke(TypeBoundKind, kind);
}
//javac versions: 6-8
private static final MethodId<JCAnnotation> Annotation = MethodId("Annotation", JCAnnotation.class, JCTree.class, List.class);
public JCAnnotation Annotation(JCTree annotationType, List<JCExpression> args) {
return invoke(Annotation, annotationType, args);
}
//javac versions: 8
private static final MethodId<JCAnnotation> TypeAnnotation = MethodId("TypeAnnotation", JCAnnotation.class, JCTree.class, List.class);
public JCAnnotation TypeAnnotation(JCTree annotationType, List<JCExpression> args) {
return invoke(TypeAnnotation, annotationType, args);
}
//javac versions: 6-8
private static final MethodId<JCModifiers> ModifiersWithAnnotations = MethodId("Modifiers", JCModifiers.class, long.class, List.class);
public JCModifiers Modifiers(long flags, List<JCAnnotation> annotations) {
return invoke(ModifiersWithAnnotations, flags, annotations);
}
//javac versions: 6-8
private static final MethodId<JCModifiers> Modifiers = MethodId("Modifiers", JCModifiers.class, long.class);
public JCModifiers Modifiers(long flags) {
return invoke(Modifiers, flags);
}
//javac versions: 8
// private static final MethodId<JCAnnotatedType> AnnotatedType = MethodId("AnnotatedType");
// public JCAnnotatedType AnnotatedType(List<JCAnnotation> annotations, JCExpression underlyingType) {
// return invoke(AnnotatedType, annotations, underlyingType);
// }
//javac versions: 6-8
private static final MethodId<JCErroneous> Erroneous = MethodId("Erroneous", JCErroneous.class);
public JCErroneous Erroneous() {
return invoke(Erroneous);
}
//javac versions: 6-8
private static final MethodId<JCErroneous> ErroneousWithErrs = MethodId("Erroneous", JCErroneous.class, List.class);
public JCErroneous Erroneous(List<? extends JCTree> errs) {
return invoke(ErroneousWithErrs, errs);
}
//javac versions: 6-8
private static final MethodId<LetExpr> LetExpr = MethodId("LetExpr", LetExpr.class, List.class, JCTree.class);
public LetExpr LetExpr(List<JCVariableDecl> defs, JCTree expr) {
return invoke(LetExpr, defs, expr);
}
//javac versions: 6-8
private static final MethodId<JCClassDecl> AnonymousClassDef = MethodId("AnonymousClassDef");
public JCClassDecl AnonymousClassDef(JCModifiers mods, List<JCTree> defs) {
return invoke(AnonymousClassDef, mods, defs);
}
//javac versions: 6-8
private static final MethodId<LetExpr> LetExprSingle = MethodId("LetExpr", LetExpr.class, JCVariableDecl.class, JCTree.class);
public LetExpr LetExpr(JCVariableDecl def, JCTree expr) {
return invoke(LetExprSingle, def, expr);
}
//javac versions: 6-8
private static final MethodId<JCIdent> IdentVarDecl = MethodId("Ident", JCIdent.class, JCVariableDecl.class);
public JCExpression Ident(JCVariableDecl param) {
return invoke(IdentVarDecl, param);
}
//javac versions: 6-8
private static final MethodId<List<JCExpression>> Idents = MethodId("Idents");
public List<JCExpression> Idents(List<JCVariableDecl> params) {
return invoke(Idents, params);
}
//javac versions: 6-8
private static final MethodId<JCMethodInvocation> App2 = MethodId("App", JCMethodInvocation.class, JCExpression.class, List.class);
public JCMethodInvocation App(JCExpression meth, List<JCExpression> args) {
return invoke(App2, meth, args);
}
//javac versions: 6-8
private static final MethodId<JCMethodInvocation> App1 = MethodId("App", JCMethodInvocation.class, JCExpression.class);
public JCMethodInvocation App(JCExpression meth) {
return invoke(App1, meth);
}
//javac versions: 6-8
private static final MethodId<List<JCAnnotation>> Annotations = MethodId("Annotations");
public List<JCAnnotation> Annotations(List<Attribute.Compound> attributes) {
return invoke(Annotations, attributes);
}
//javac versions: 6-8
private static final MethodId<JCLiteral> LiteralWithValue = MethodId("Literal", JCLiteral.class, Object.class);
public JCLiteral Literal(Object value) {
return invoke(LiteralWithValue, value);
}
//javac versions: 6-8
private static final MethodId<JCAnnotation> AnnotationWithAttributeOnly = MethodId("Annotation", JCAnnotation.class, Attribute.class);
public JCAnnotation Annotation(Attribute a) {
return invoke(AnnotationWithAttributeOnly, a);
}
//javac versions: 8
private static final MethodId<JCAnnotation> TypeAnnotationWithAttributeOnly = MethodId("TypeAnnotation", JCAnnotation.class, Attribute.class);
public JCAnnotation TypeAnnotation(Attribute a) {
return invoke(TypeAnnotationWithAttributeOnly, a);
}
//javac versions: 6-8
private static final MethodId<JCStatement> Call = MethodId("Call");
public JCStatement Call(JCExpression apply) {
return invoke(Call, apply);
}
//javac versions: 6-8
private static final MethodId<JCExpression> Type = MethodId("Type");
public JCExpression Type(Type type) {
return invoke(Type, type);
}
}<|fim▁end|> |
import com.sun.tools.javac.code.Attribute;
import com.sun.tools.javac.code.BoundKind; |
<|file_name|>hpdf.py<|end_file_name|><|fim▁begin|>##
## * << Haru Free PDF Library 2.0.8 >> -- hpdf.h
## *
## * URL http://libharu.org/
## *
## * Copyright (c) 1999-2006 Takeshi Kanno
## *
## * Permission to use, copy, modify, distribute and sell this software
## * and its documentation for any purpose is hereby granted without fee,
## * provided that the above copyright notice appear in all copies and
## * that both that copyright notice and this permission notice appear
## * in supporting documentation.
## * It is provided "as is" without express or implied warranty.
## *
##
## port to python by Li Jun
## http://groups.google.com/group/pythoncia
import os
import sys
import types
def setpath():
dllpath='%s/dll' %(os.path.dirname(os.path.realpath(__file__)))
if 'PATH' in os.environ:
if dllpath not in os.environ['PATH']:
os.environ['PATH']='%s;%s' % (dllpath, os.environ['PATH'])
else:
os.environ['PATH']=dllpath
setpath()
from hpdf_consts import *
from hpdf_types import *
if os.sys.platform=='win32':
harudll='libhpdf.dll'
#haru=WinDLL(harudll)
haru=CDLL(harudll)
else:
harudll='libhpdf.so'
haru=CDLL(harudll)
HPDF_HANDLE=c_void_p
HPDF_Doc=HPDF_HANDLE
HPDF_Page=HPDF_HANDLE
HPDF_Pages=HPDF_HANDLE
HPDF_Stream=HPDF_HANDLE
HPDF_Image=HPDF_HANDLE
HPDF_Font=HPDF_HANDLE
HPDF_Outline=HPDF_HANDLE
HPDF_Encoder=HPDF_HANDLE
HPDF_Destination=HPDF_HANDLE
HPDF_XObject=HPDF_HANDLE
HPDF_Annotation=HPDF_HANDLE
HPDF_ExtGState=HPDF_HANDLE
#const char * HPDF_GetVersion (void)
HPDF_GetVersion=haru.HPDF_GetVersion
HPDF_GetVersion.restype=c_char_p
#HPDF_Doc HPDF_NewEx (HPDF_Error_Handler user_error_fn, HPDF_Alloc_Func user_alloc_fn, HPDF_Free_Func user_free_fn, HPDF_UINT mem_pool_buf_size, void *user_data)
HPDF_NewEx=haru.HPDF_NewEx
HPDF_NewEx.restype=HPDF_Doc
#HPDF_Doc HPDF_New (HPDF_Error_Handler user_error_fn, void *user_data)
HPDF_New=haru.HPDF_New
HPDF_New.restype=HPDF_Doc
#HPDF_STATUS HPDF_SetErrorHandler (HPDF_Doc pdf, HPDF_Error_Handler user_error_fn)
HPDF_SetErrorHandler=haru.HPDF_SetErrorHandler
HPDF_SetErrorHandler.restype=HPDF_STATUS
#void HPDF_Free (HPDF_Doc pdf)
HPDF_Free=haru.HPDF_Free
HPDF_Free.restype=None
#HPDF_STATUS HPDF_NewDoc (HPDF_Doc pdf)
HPDF_NewDoc=haru.HPDF_NewDoc
HPDF_NewDoc.restype=HPDF_STATUS
#void HPDF_FreeDoc (HPDF_Doc pdf)
HPDF_FreeDoc=haru.HPDF_FreeDoc
HPDF_FreeDoc.restype=None
#HPDF_BOOL HPDF_HasDoc (HPDF_Doc pdf)
HPDF_HasDoc=haru.HPDF_HasDoc
HPDF_HasDoc.restype=HPDF_BOOL
#void HPDF_FreeDocAll (HPDF_Doc pdf)
HPDF_FreeDocAll=haru.HPDF_FreeDocAll
HPDF_FreeDocAll.restype=None
#HPDF_STATUS HPDF_SaveToStream (HPDF_Doc pdf)
HPDF_SaveToStream=haru.HPDF_SaveToStream
HPDF_SaveToStream.restype=HPDF_STATUS
#HPDF_UINT32 HPDF_GetStreamSize (HPDF_Doc pdf)
HPDF_GetStreamSize=haru.HPDF_GetStreamSize
HPDF_GetStreamSize.restype=HPDF_UINT32
#HPDF_STATUS HPDF_ReadFromStream (HPDF_Doc pdf, HPDF_BYTE *buf, HPDF_UINT32 *size)
_HPDF_ReadFromStream=haru.HPDF_ReadFromStream
_HPDF_ReadFromStream.restype=HPDF_STATUS
def HPDF_ReadFromStream(
pdf, #HPDF_Doc
buf, #POINTER(HPDF_BYTE)
size, #POINTER(HPDF_UINT32)
):
if type(buf) in (types.ListType, types.TupleType):
size=len(buf)
buf=pointer((HPDF_BYTE*size)(*buf))
size=HPDF_UINT32(int(size))
return _HPDF_ReadFromStream(
pdf, #HPDF_Doc
buf, #POINTER(HPDF_BYTE)
size, #POINTER(HPDF_UINT32)
)
#HPDF_STATUS HPDF_ResetStream (HPDF_Doc pdf)
HPDF_ResetStream=haru.HPDF_ResetStream
HPDF_ResetStream.restype=HPDF_STATUS
#HPDF_STATUS HPDF_SaveToFile (HPDF_Doc pdf, const char *file_name)
HPDF_SaveToFile=haru.HPDF_SaveToFile
HPDF_SaveToFile.restype=HPDF_STATUS
#HPDF_STATUS HPDF_GetError (HPDF_Doc pdf)
HPDF_GetError=haru.HPDF_GetError
HPDF_GetError.restype=HPDF_STATUS
#HPDF_STATUS HPDF_GetErrorDetail (HPDF_Doc pdf)
HPDF_GetErrorDetail=haru.HPDF_GetErrorDetail
HPDF_GetErrorDetail.restype=HPDF_STATUS
#void HPDF_ResetError (HPDF_Doc pdf)
HPDF_ResetError=haru.HPDF_ResetError
HPDF_ResetError.restype=None
#HPDF_STATUS HPDF_SetPagesConfiguration (HPDF_Doc pdf, HPDF_UINT page_per_pages)
_HPDF_SetPagesConfiguration=haru.HPDF_SetPagesConfiguration
_HPDF_SetPagesConfiguration.restype=HPDF_STATUS
def HPDF_SetPagesConfiguration(
pdf, #HPDF_Doc
page_per_pages, #HPDF_UINT
):
page_per_pages=HPDF_UINT(int(page_per_pages))
return _HPDF_SetPagesConfiguration(
pdf, #HPDF_Doc
page_per_pages, #HPDF_UINT
)
#HPDF_Page HPDF_GetPageByIndex (HPDF_Doc pdf, HPDF_UINT index)
HPDF_GetPageByIndex=haru.HPDF_GetPageByIndex
HPDF_GetPageByIndex.restype=HPDF_Page
#---------------------------------------------------------------------------
#---------------------------------------------------------------------------
#HPDF_PageLayout HPDF_GetPageLayout (HPDF_Doc pdf)
HPDF_GetPageLayout=haru.HPDF_GetPageLayout
HPDF_GetPageLayout.restype=HPDF_PageLayout
#HPDF_STATUS HPDF_SetPageLayout (HPDF_Doc pdf, HPDF_PageLayout layout)
HPDF_SetPageLayout=haru.HPDF_SetPageLayout
HPDF_SetPageLayout.restype=HPDF_STATUS
#HPDF_PageMode HPDF_GetPageMode (HPDF_Doc pdf)
HPDF_GetPageMode=haru.HPDF_GetPageMode
HPDF_GetPageMode.restype=HPDF_PageMode
#HPDF_STATUS HPDF_SetPageMode (HPDF_Doc pdf, HPDF_PageMode mode)
HPDF_SetPageMode=haru.HPDF_SetPageMode
HPDF_SetPageMode.restype=HPDF_STATUS
#HPDF_UINT HPDF_GetViewerPreference (HPDF_Doc pdf)
HPDF_GetViewerPreference=haru.HPDF_GetViewerPreference
HPDF_GetViewerPreference.restype=HPDF_UINT
#HPDF_STATUS HPDF_SetViewerPreference (HPDF_Doc pdf, HPDF_UINT value)
HPDF_SetViewerPreference=haru.HPDF_SetViewerPreference
HPDF_SetViewerPreference.restype=HPDF_STATUS
#HPDF_STATUS HPDF_SetOpenAction (HPDF_Doc pdf, HPDF_Destination open_action)
HPDF_SetOpenAction=haru.HPDF_SetOpenAction
HPDF_SetOpenAction.restype=HPDF_STATUS
#---------------------------------------------------------------------------
#----- page handling -------------------------------------------------------
#HPDF_Page HPDF_GetCurrentPage (HPDF_Doc pdf)
HPDF_GetCurrentPage=haru.HPDF_GetCurrentPage
HPDF_GetCurrentPage.restype=HPDF_Page
#HPDF_Page HPDF_AddPage (HPDF_Doc pdf)
HPDF_AddPage=haru.HPDF_AddPage
HPDF_AddPage.restype=HPDF_Page
#HPDF_Page HPDF_InsertPage (HPDF_Doc pdf, HPDF_Page page)
HPDF_InsertPage=haru.HPDF_InsertPage
HPDF_InsertPage.restype=HPDF_Page
#HPDF_STATUS HPDF_Page_SetWidth (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetWidth=haru.HPDF_Page_SetWidth
_HPDF_Page_SetWidth.restype=HPDF_STATUS
def HPDF_Page_SetWidth(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetWidth(
page, #HPDF_Page
value, #HPDF_REAL
)
#HPDF_STATUS HPDF_Page_SetHeight (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetHeight=haru.HPDF_Page_SetHeight
_HPDF_Page_SetHeight.restype=HPDF_STATUS
def HPDF_Page_SetHeight(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetHeight(
page, #HPDF_Page
value, #HPDF_REAL
)
#HPDF_STATUS
#HPDF_Page_SetSize (HPDF_Page page,
# HPDF_PageSizes size,
# HPDF_PageDirection direction);
HPDF_Page_SetSize=haru.HPDF_Page_SetSize
HPDF_Page_SetSize.restype=HPDF_STATUS
#HPDF_STATUS HPDF_Page_SetRotate (HPDF_Page page, HPDF_UINT16 angle)
_HPDF_Page_SetRotate=haru.HPDF_Page_SetRotate
_HPDF_Page_SetRotate.restype=HPDF_STATUS
def HPDF_Page_SetRotate(
page, #HPDF_Page
angle, #HPDF_UINT16
):
angle=HPDF_UINT16(int(angle))
return _HPDF_Page_SetRotate(
page, #HPDF_Page
angle, #HPDF_UINT16
)
#---------------------------------------------------------------------------
#----- font handling -------------------------------------------------------
#HPDF_Font HPDF_GetFont (HPDF_Doc pdf, const char *font_name, const char *encoding_name)
HPDF_GetFont=haru.HPDF_GetFont
HPDF_GetFont.restype=HPDF_Font
#const char* HPDF_LoadType1FontFromFile (HPDF_Doc pdf, const char *afm_file_name, const char *data_file_name)
HPDF_LoadType1FontFromFile=haru.HPDF_LoadType1FontFromFile
HPDF_LoadType1FontFromFile.restype=c_char_p
#const char* HPDF_LoadTTFontFromFile (HPDF_Doc pdf, const char *file_name, HPDF_BOOL embedding)
HPDF_LoadTTFontFromFile=haru.HPDF_LoadTTFontFromFile
HPDF_LoadTTFontFromFile.restype=c_char_p
#const char* HPDF_LoadTTFontFromFile2 (HPDF_Doc pdf, const char *file_name, HPDF_UINT index, HPDF_BOOL embedding)
HPDF_LoadTTFontFromFile2=haru.HPDF_LoadTTFontFromFile2
HPDF_LoadTTFontFromFile2.restype=c_char_p
#HPDF_STATUS HPDF_AddPageLabel (HPDF_Doc pdf, HPDF_UINT page_num, HPDF_PageNumStyle style, HPDF_UINT first_page, const char *prefix)
_HPDF_AddPageLabel=haru.HPDF_AddPageLabel
_HPDF_AddPageLabel.restype=HPDF_STATUS
def HPDF_AddPageLabel(
pdf, #HPDF_Doc
page_num, #HPDF_UINT
style, #HPDF_PageNumStyle
first_page, #HPDF_UINT
prefix, #c_char_p
):
page_num, first_page=[HPDF_UINT(int(i))for i in (page_num, first_page)]
return _HPDF_AddPageLabel(
pdf, #HPDF_Doc
page_num, #HPDF_UINT
style, #HPDF_PageNumStyle
first_page, #HPDF_UINT
prefix, #c_char_p
)
#HPDF_STATUS HPDF_UseJPFonts (HPDF_Doc pdf)
HPDF_UseJPFonts=haru.HPDF_UseJPFonts
HPDF_UseJPFonts.restype=HPDF_STATUS
#HPDF_STATUS HPDF_UseKRFonts (HPDF_Doc pdf)
HPDF_UseKRFonts=haru.HPDF_UseKRFonts
HPDF_UseKRFonts.restype=HPDF_STATUS
#HPDF_STATUS HPDF_UseCNSFonts (HPDF_Doc pdf)
HPDF_UseCNSFonts=haru.HPDF_UseCNSFonts
HPDF_UseCNSFonts.restype=HPDF_STATUS
#HPDF_STATUS HPDF_UseCNTFonts (HPDF_Doc pdf)
HPDF_UseCNTFonts=haru.HPDF_UseCNTFonts
HPDF_UseCNTFonts.restype=HPDF_STATUS
#--------------------------------------------------------------------------
#----- outline ------------------------------------------------------------
#HPDF_Outline HPDF_CreateOutline (HPDF_Doc pdf, HPDF_Outline parent, const char *title, HPDF_Encoder encoder)
HPDF_CreateOutline=haru.HPDF_CreateOutline
HPDF_CreateOutline.restype=HPDF_Outline
#HPDF_STATUS HPDF_Outline_SetOpened (HPDF_Outline outline, HPDF_BOOL opened)
HPDF_Outline_SetOpened=haru.HPDF_Outline_SetOpened
HPDF_Outline_SetOpened.restype=HPDF_STATUS
#HPDF_STATUS HPDF_Outline_SetDestination (HPDF_Outline outline, HPDF_Destination dst)
HPDF_Outline_SetDestination=haru.HPDF_Outline_SetDestination
HPDF_Outline_SetDestination.restype=HPDF_STATUS
#--------------------------------------------------------------------------
#----- destination --------------------------------------------------------
#HPDF_Destination HPDF_Page_CreateDestination (HPDF_Page page)
HPDF_Page_CreateDestination=haru.HPDF_Page_CreateDestination
HPDF_Page_CreateDestination.restype=HPDF_Destination
#HPDF_STATUS HPDF_Destination_SetXYZ (HPDF_Destination dst, HPDF_REAL left, HPDF_REAL top, HPDF_REAL zoom)
_HPDF_Destination_SetXYZ=haru.HPDF_Destination_SetXYZ
_HPDF_Destination_SetXYZ.restype=HPDF_STATUS
def HPDF_Destination_SetXYZ(
dst, #HPDF_Destination
left, #HPDF_REAL
top, #HPDF_REAL
zoom, #HPDF_REAL
):
left=HPDF_REAL(left)
top=HPDF_REAL(top)
zoom=HPDF_REAL(zoom)
return _HPDF_Destination_SetXYZ(
dst, #HPDF_Destination
left, #HPDF_REAL
top, #HPDF_REAL
zoom, #HPDF_REAL
)
#HPDF_STATUS HPDF_Destination_SetFit (HPDF_Destination dst)
HPDF_Destination_SetFit=haru.HPDF_Destination_SetFit
HPDF_Destination_SetFit.restype=HPDF_STATUS
#HPDF_STATUS HPDF_Destination_SetFitH (HPDF_Destination dst, HPDF_REAL top)
_HPDF_Destination_SetFitH=haru.HPDF_Destination_SetFitH
_HPDF_Destination_SetFitH.restype=HPDF_STATUS
def HPDF_Destination_SetFitH(
dst, #HPDF_Destination
top, #HPDF_REAL
):
top=HPDF_REAL(top)
return _HPDF_Destination_SetFitH(
dst, #HPDF_Destination
top, #HPDF_REAL
)
#HPDF_STATUS HPDF_Destination_SetFitV (HPDF_Destination dst, HPDF_REAL left)
_HPDF_Destination_SetFitV=haru.HPDF_Destination_SetFitV
_HPDF_Destination_SetFitV.restype=HPDF_STATUS
def HPDF_Destination_SetFitV(
dst, #HPDF_Destination
left, #HPDF_REAL
):
left=HPDF_REAL(left)
return _HPDF_Destination_SetFitV(
dst, #HPDF_Destination
left, #HPDF_REAL
)
#HPDF_STATUS HPDF_Destination_SetFitR (HPDF_Destination dst, HPDF_REAL left, HPDF_REAL bottom, HPDF_REAL right, HPDF_REAL top)
_HPDF_Destination_SetFitR=haru.HPDF_Destination_SetFitR
_HPDF_Destination_SetFitR.restype=HPDF_STATUS
def HPDF_Destination_SetFitR(
dst, #HPDF_Destination
left, #HPDF_REAL
bottom, #HPDF_REAL
right, #HPDF_REAL
top, #HPDF_REAL
):
left=HPDF_REAL(left)
bottom=HPDF_REAL(bottom)
right=HPDF_REAL(right)
top=HPDF_REAL(top)
return _HPDF_Destination_SetFitR(
dst, #HPDF_Destination
left, #HPDF_REAL
bottom, #HPDF_REAL
right, #HPDF_REAL
top, #HPDF_REAL
)
#HPDF_STATUS HPDF_Destination_SetFitB (HPDF_Destination dst)
HPDF_Destination_SetFitB=haru.HPDF_Destination_SetFitB
HPDF_Destination_SetFitB.restype=HPDF_STATUS
#HPDF_STATUS HPDF_Destination_SetFitBH (HPDF_Destination dst, HPDF_REAL top)
_HPDF_Destination_SetFitBH=haru.HPDF_Destination_SetFitBH
_HPDF_Destination_SetFitBH.restype=HPDF_STATUS
def HPDF_Destination_SetFitBH(
dst, #HPDF_Destination
top, #HPDF_REAL
):
top=HPDF_REAL(top)
return _HPDF_Destination_SetFitBH(
dst, #HPDF_Destination
top, #HPDF_REAL
)
#HPDF_STATUS HPDF_Destination_SetFitBV (HPDF_Destination dst, HPDF_REAL left)
_HPDF_Destination_SetFitBV=haru.HPDF_Destination_SetFitBV
_HPDF_Destination_SetFitBV.restype=HPDF_STATUS
def HPDF_Destination_SetFitBV(
dst, #HPDF_Destination
left, #HPDF_REAL
):
left=HPDF_REAL(left)
return _HPDF_Destination_SetFitBV(
dst, #HPDF_Destination
left, #HPDF_REAL
)
#--------------------------------------------------------------------------
#----- encoder ------------------------------------------------------------
#HPDF_Encoder HPDF_GetEncoder (HPDF_Doc pdf, const char *encoding_name)
HPDF_GetEncoder=haru.HPDF_GetEncoder
HPDF_GetEncoder.restype=HPDF_Encoder
#HPDF_Encoder HPDF_GetCurrentEncoder (HPDF_Doc pdf)
HPDF_GetCurrentEncoder=haru.HPDF_GetCurrentEncoder
HPDF_GetCurrentEncoder.restype=HPDF_Encoder
#HPDF_STATUS HPDF_SetCurrentEncoder (HPDF_Doc pdf, const char *encoding_name)
HPDF_SetCurrentEncoder=haru.HPDF_SetCurrentEncoder
HPDF_SetCurrentEncoder.restype=HPDF_STATUS
#HPDF_EncoderType HPDF_Encoder_GetType (HPDF_Encoder encoder)
HPDF_Encoder_GetType=haru.HPDF_Encoder_GetType
HPDF_Encoder_GetType.restype=HPDF_EncoderType
#HPDF_ByteType HPDF_Encoder_GetByteType (HPDF_Encoder encoder, const char *text, HPDF_UINT index)
_HPDF_Encoder_GetByteType=haru.HPDF_Encoder_GetByteType
_HPDF_Encoder_GetByteType.restype=HPDF_ByteType
def HPDF_Encoder_GetByteType(
encoder, #HPDF_Encoder
text, #const char *
index #HPDF_UINT
):
if type(text) in (types.ListType, types.TupleType):
if type(text[-1]) != types.StringType:
text=[chr(i) for i in text]
text=''.join(text)
return _HPDF_Encoder_GetByteType(
encoder, #HPDF_Encoder
text, #const char *
index #HPDF_UINT
)
#HPDF_UNICODE HPDF_Encoder_GetUnicode (HPDF_Encoder encoder, HPDF_UINT16 code)
HPDF_Encoder_GetUnicode=haru.HPDF_Encoder_GetUnicode
HPDF_Encoder_GetUnicode.restype=HPDF_UNICODE
#HPDF_WritingMode HPDF_Encoder_GetWritingMode (HPDF_Encoder encoder)
HPDF_Encoder_GetWritingMode=haru.HPDF_Encoder_GetWritingMode
HPDF_Encoder_GetWritingMode.restype=HPDF_WritingMode
#HPDF_STATUS HPDF_UseJPEncodings (HPDF_Doc pdf)
HPDF_UseJPEncodings=haru.HPDF_UseJPEncodings
HPDF_UseJPEncodings.restype=HPDF_STATUS
#HPDF_STATUS HPDF_UseKREncodings (HPDF_Doc pdf)
HPDF_UseKREncodings=haru.HPDF_UseKREncodings
HPDF_UseKREncodings.restype=HPDF_STATUS
#HPDF_STATUS HPDF_UseCNSEncodings (HPDF_Doc pdf)
HPDF_UseCNSEncodings=haru.HPDF_UseCNSEncodings
HPDF_UseCNSEncodings.restype=HPDF_STATUS
#HPDF_STATUS HPDF_UseCNTEncodings (HPDF_Doc pdf)
HPDF_UseCNTEncodings=haru.HPDF_UseCNTEncodings
HPDF_UseCNTEncodings.restype=HPDF_STATUS
#--------------------------------------------------------------------------
#----- annotation ---------------------------------------------------------
#HPDF_Annotation HPDF_Page_CreateTextAnnot (HPDF_Page page, HPDF_Rect rect, const char *text, HPDF_Encoder encoder)
HPDF_Page_CreateTextAnnot=haru.HPDF_Page_CreateTextAnnot
HPDF_Page_CreateTextAnnot.restype=HPDF_Annotation
#HPDF_Annotation HPDF_Page_CreateLinkAnnot (HPDF_Page page, HPDF_Rect rect, HPDF_Destination dst)
HPDF_Page_CreateLinkAnnot=haru.HPDF_Page_CreateLinkAnnot
HPDF_Page_CreateLinkAnnot.restype=HPDF_Annotation
#HPDF_Annotation HPDF_Page_CreateURILinkAnnot (HPDF_Page page, HPDF_Rect rect, const char *uri)
HPDF_Page_CreateURILinkAnnot=haru.HPDF_Page_CreateURILinkAnnot
HPDF_Page_CreateURILinkAnnot.restype=HPDF_Annotation
#HPDF_STATUS HPDF_LinkAnnot_SetHighlightMode (HPDF_Annotation annot, HPDF_AnnotHighlightMode mode)
HPDF_LinkAnnot_SetHighlightMode=haru.HPDF_LinkAnnot_SetHighlightMode
HPDF_LinkAnnot_SetHighlightMode.restype=HPDF_STATUS
#HPDF_STATUS HPDF_LinkAnnot_SetBorderStyle (HPDF_Annotation annot, HPDF_REAL width, HPDF_UINT16 dash_on, HPDF_UINT16 dash_off)
_HPDF_LinkAnnot_SetBorderStyle=haru.HPDF_LinkAnnot_SetBorderStyle
_HPDF_LinkAnnot_SetBorderStyle.restype=HPDF_STATUS
def HPDF_LinkAnnot_SetBorderStyle(
annot, #HPDF_Annotation
width, #HPDF_REAL
dash_on, #HPDF_UINT16
dash_off, #HPDF_UINT16
):
width=HPDF_REAL(width)
dash_on=HPDF_UINT16(dash_on)
dash_off=HPDF_UINT16(dash_off)
return _HPDF_LinkAnnot_SetBorderStyle(
annot, #HPDF_Annotation
width, #HPDF_REAL
dash_on, #HPDF_UINT16
dash_off, #HPDF_UINT16
)
#HPDF_STATUS HPDF_TextAnnot_SetIcon (HPDF_Annotation annot, HPDF_AnnotIcon icon)
HPDF_TextAnnot_SetIcon=haru.HPDF_TextAnnot_SetIcon
HPDF_TextAnnot_SetIcon.restype=HPDF_STATUS
#HPDF_STATUS HPDF_TextAnnot_SetOpened (HPDF_Annotation annot, HPDF_BOOL opened)
HPDF_TextAnnot_SetOpened=haru.HPDF_TextAnnot_SetOpened
HPDF_TextAnnot_SetOpened.restype=HPDF_STATUS
#--------------------------------------------------------------------------
#----- image data ---------------------------------------------------------
#HPDF_Image HPDF_LoadPngImageFromFile (HPDF_Doc pdf, const char *filename)
HPDF_LoadPngImageFromFile=haru.HPDF_LoadPngImageFromFile
HPDF_LoadPngImageFromFile.restype=HPDF_Image
#HPDF_Image HPDF_LoadPngImageFromFile2 (HPDF_Doc pdf, const char *filename)
HPDF_LoadPngImageFromFile2=haru.HPDF_LoadPngImageFromFile2
HPDF_LoadPngImageFromFile2.restype=HPDF_Image
#HPDF_Image HPDF_LoadJpegImageFromFile (HPDF_Doc pdf, const char *filename)
HPDF_LoadJpegImageFromFile=haru.HPDF_LoadJpegImageFromFile
HPDF_LoadJpegImageFromFile.restype=HPDF_Image
#HPDF_Image HPDF_LoadRawImageFromFile (HPDF_Doc pdf, const char *filename, HPDF_UINT width, HPDF_UINT height, HPDF_ColorSpace color_space)
_HPDF_LoadRawImageFromFile=haru.HPDF_LoadRawImageFromFile
_HPDF_LoadRawImageFromFile.restype=HPDF_Image
def HPDF_LoadRawImageFromFile(
pdf, #HPDF_Doc
filename, #c_char_p
width, #HPDF_UINT
height, #HPDF_UINT
color_space, #HPDF_ColorSpace
):
width=HPDF_UINT(width)
height=HPDF_UINT(height)
return _HPDF_LoadRawImageFromFile(
pdf, #HPDF_Doc
filename, #c_char_p
width, #HPDF_UINT
height, #HPDF_UINT
color_space, #HPDF_ColorSpace
)
#HPDF_Image HPDF_LoadRawImageFromMem (HPDF_Doc pdf, const HPDF_BYTE *buf, HPDF_UINT width, HPDF_UINT height, HPDF_ColorSpace color_space, HPDF_UINT bits_per_component)
_HPDF_LoadRawImageFromMem=haru.HPDF_LoadRawImageFromMem
_HPDF_LoadRawImageFromMem.restype=HPDF_Image
def HPDF_LoadRawImageFromMem(
pdf, #HPDF_Doc
buf, #POINTER(HPDF_BYTE)
width, #HPDF_UINT
height, #HPDF_UINT
color_space, #HPDF_ColorSpace
bits_per_component, #HPDF_UINT
):
if type(buf) in (types.ListType, types.TupleType):
size=len(buf)
buf=pointer((HPDF_BYTE*size)(*buf))
if height in [0, None]:
height=size/width
width=HPDF_UINT(width)
height=HPDF_UINT(height)
bits_per_component=HPDF_UINT(bits_per_component)
return _HPDF_LoadRawImageFromMem(
pdf, #HPDF_Doc
buf, #POINTER(HPDF_BYTE)
width, #HPDF_UINT
height, #HPDF_UINT
color_space, #HPDF_ColorSpace
bits_per_component, #HPDF_UINT
)
#HPDF_Point HPDF_Image_GetSize (HPDF_Image image)
HPDF_Image_GetSize=haru.HPDF_Image_GetSize
HPDF_Image_GetSize.restype=HPDF_Point
#HPDF_STATUS HPDF_Image_GetSize2 (HPDF_Image image, HPDF_Point *size)
_HPDF_Image_GetSize2=haru.HPDF_Image_GetSize2
_HPDF_Image_GetSize2.restype=HPDF_STATUS
def HPDF_Image_GetSize2(
image, #HPDF_Image
size=None, #POINTER(HPDF_Point)
):
size=HPDF_Point
ret= _HPDF_Image_GetSize2(
image, #HPDF_Image
size, #POINTER(HPDF_Point)
)
return ret, size.x, size.y
#HPDF_UINT HPDF_Image_GetWidth (HPDF_Image image)
HPDF_Image_GetWidth=haru.HPDF_Image_GetWidth
HPDF_Image_GetWidth.restype=HPDF_UINT
#HPDF_UINT HPDF_Image_GetHeight (HPDF_Image image)
HPDF_Image_GetHeight=haru.HPDF_Image_GetHeight
HPDF_Image_GetHeight.restype=HPDF_UINT
#HPDF_UINT HPDF_Image_GetBitsPerComponent (HPDF_Image image)
HPDF_Image_GetBitsPerComponent=haru.HPDF_Image_GetBitsPerComponent
HPDF_Image_GetBitsPerComponent.restype=HPDF_UINT
#const char* HPDF_Image_GetColorSpace (HPDF_Image image)
HPDF_Image_GetColorSpace=haru.HPDF_Image_GetColorSpace
HPDF_Image_GetColorSpace.restype=c_char_p
#HPDF_STATUS HPDF_Image_SetColorMask (HPDF_Image image, HPDF_UINT rmin, HPDF_UINT rmax, HPDF_UINT gmin, HPDF_UINT gmax, HPDF_UINT bmin, HPDF_UINT bmax)
_HPDF_Image_SetColorMask=haru.HPDF_Image_SetColorMask
_HPDF_Image_SetColorMask.restype=HPDF_STATUS
def HPDF_Image_SetColorMask(
image, #HPDF_Image
rmin, #HPDF_UINT
rmax, #HPDF_UINT
gmin, #HPDF_UINT
gmax, #HPDF_UINT
bmin, #HPDF_UINT
bmax, #HPDF_UINT
):
rmin=HPDF_UINT(rmin)
rmax=HPDF_UINT(rmax)
gmin=HPDF_UINT(gmin)
gmax=HPDF_UINT(gmax)
bmin=HPDF_UINT(bmin)
bmax=HPDF_UINT(bmax)
return _HPDF_Image_SetColorMask(
image, #HPDF_Image
rmin, #HPDF_UINT
rmax, #HPDF_UINT
gmin, #HPDF_UINT
gmax, #HPDF_UINT
bmin, #HPDF_UINT
bmax, #HPDF_UINT
)
#HPDF_STATUS HPDF_Image_SetMaskImage (HPDF_Image image, HPDF_Image mask_image)
HPDF_Image_SetMaskImage=haru.HPDF_Image_SetMaskImage
HPDF_Image_SetMaskImage.restype=HPDF_STATUS
#--------------------------------------------------------------------------
#----- info dictionary ----------------------------------------------------
#HPDF_STATUS HPDF_SetInfoAttr (HPDF_Doc pdf, HPDF_InfoType type, const char *value)
HPDF_SetInfoAttr=haru.HPDF_SetInfoAttr
HPDF_SetInfoAttr.restype=HPDF_STATUS
#const char* HPDF_GetInfoAttr (HPDF_Doc pdf, HPDF_InfoType type)
HPDF_GetInfoAttr=haru.HPDF_GetInfoAttr
HPDF_GetInfoAttr.restype=c_char_p
#HPDF_STATUS HPDF_SetInfoDateAttr (HPDF_Doc pdf, HPDF_InfoType type, HPDF_Date value)
HPDF_SetInfoDateAttr=haru.HPDF_SetInfoDateAttr
HPDF_SetInfoDateAttr.restype=HPDF_STATUS
#--------------------------------------------------------------------------
#----- encryption ---------------------------------------------------------
#HPDF_STATUS HPDF_SetPassword (HPDF_Doc pdf, const char *owner_passwd, const char *user_passwd)
HPDF_SetPassword=haru.HPDF_SetPassword
HPDF_SetPassword.restype=HPDF_STATUS
#HPDF_STATUS HPDF_SetPermission (HPDF_Doc pdf, HPDF_UINT permission)
_HPDF_SetPermission=haru.HPDF_SetPermission
_HPDF_SetPermission.restype=HPDF_STATUS
def HPDF_SetPermission(
pdf, #HPDF_Doc
permission, #HPDF_UINT
):
permission=HPDF_UINT(int(permission))
return _HPDF_SetPermission(
pdf, #HPDF_Doc
permission, #HPDF_UINT
)
#HPDF_STATUS HPDF_SetEncryptionMode (HPDF_Doc pdf, HPDF_EncryptMode mode, HPDF_UINT key_len)
_HPDF_SetEncryptionMode=haru.HPDF_SetEncryptionMode
_HPDF_SetEncryptionMode.restype=HPDF_STATUS
def HPDF_SetEncryptionMode(
pdf, #HPDF_Doc
mode, #HPDF_EncryptMode
key_len, #HPDF_UINT
):
key_len=HPDF_UINT(int(key_len))
return _HPDF_SetEncryptionMode(
pdf, #HPDF_Doc
mode, #HPDF_EncryptMode
key_len, #HPDF_UINT
)
#--------------------------------------------------------------------------
#----- compression --------------------------------------------------------
#HPDF_STATUS HPDF_SetCompressionMode (HPDF_Doc pdf, HPDF_UINT mode)
HPDF_SetCompressionMode=haru.HPDF_SetCompressionMode
HPDF_SetCompressionMode.restype=HPDF_STATUS
#--------------------------------------------------------------------------
#----- font ---------------------------------------------------------------
#const char* HPDF_Font_GetFontName (HPDF_Font font)
HPDF_Font_GetFontName=haru.HPDF_Font_GetFontName
HPDF_Font_GetFontName.restype=c_char_p
#const char* HPDF_Font_GetEncodingName (HPDF_Font font)
HPDF_Font_GetEncodingName=haru.HPDF_Font_GetEncodingName
HPDF_Font_GetEncodingName.restype=c_char_p
#HPDF_INT HPDF_Font_GetUnicodeWidth (HPDF_Font font, HPDF_UNICODE code)
HPDF_Font_GetUnicodeWidth=haru.HPDF_Font_GetUnicodeWidth
HPDF_Font_GetUnicodeWidth.restype=HPDF_INT
#HPDF_Box HPDF_Font_GetBBox (HPDF_Font font)
HPDF_Font_GetBBox=haru.HPDF_Font_GetBBox
HPDF_Font_GetBBox.restype=HPDF_Box
#HPDF_INT HPDF_Font_GetAscent (HPDF_Font font)
HPDF_Font_GetAscent=haru.HPDF_Font_GetAscent
HPDF_Font_GetAscent.restype=HPDF_INT
#HPDF_INT HPDF_Font_GetDescent (HPDF_Font font)
HPDF_Font_GetDescent=haru.HPDF_Font_GetDescent
HPDF_Font_GetDescent.restype=HPDF_INT
#HPDF_UINT HPDF_Font_GetXHeight (HPDF_Font font)
HPDF_Font_GetXHeight=haru.HPDF_Font_GetXHeight
HPDF_Font_GetXHeight.restype=HPDF_UINT
#HPDF_UINT HPDF_Font_GetCapHeight (HPDF_Font font)
HPDF_Font_GetCapHeight=haru.HPDF_Font_GetCapHeight
HPDF_Font_GetCapHeight.restype=HPDF_UINT
#HPDF_TextWidth HPDF_Font_TextWidth (HPDF_Font font, const HPDF_BYTE *text, HPDF_UINT len)
HPDF_Font_TextWidth=haru.HPDF_Font_TextWidth
HPDF_Font_TextWidth.restype=HPDF_TextWidth
#HPDF_UINT HPDF_Font_MeasureText (HPDF_Font font, const HPDF_BYTE *text, HPDF_UINT len, HPDF_REAL width, HPDF_REAL font_size, HPDF_REAL char_space, HPDF_REAL word_space, HPDF_BOOL wordwrap, HPDF_REAL *real_width)
_HPDF_Font_MeasureText=haru.HPDF_Font_MeasureText
_HPDF_Font_MeasureText.restype=HPDF_UINT
def HPDF_Font_MeasureText(
font, #HPDF_Font
text, #POINTER(HPDF_BYTE)
length, #HPDF_UINT
width, #HPDF_REAL
font_size, #HPDF_REAL
char_space, #HPDF_REAL
word_space, #HPDF_REAL
wordwrap, #HPDF_BOOL
real_width, #POINTER(HPDF_REAL)
):
if type(text) in (types.TupleType, types.ListType):
length=len(text)
text=pointer((HPDF_BYTE*length)(*text))
length=HPDF_UINT(int(length))
width=HPDF_REAL(width)
font_size=HPDF_REAL(font_size)
char_space=HPDF_REAL(char_space)
word_space=HPDF_REAL(word_space)
real_width=HPDF_REAL(real_width)
return _HPDF_Font_MeasureText(
font, #HPDF_Font
text, #POINTER(HPDF_BYTE)
length, #HPDF_UINT
width, #HPDF_REAL
font_size, #HPDF_REAL
char_space, #HPDF_REAL
word_space, #HPDF_REAL
wordwrap, #HPDF_BOOL
real_width, #POINTER(HPDF_REAL)
)
#--------------------------------------------------------------------------
#----- extended graphics state --------------------------------------------
#HPDF_ExtGState HPDF_CreateExtGState (HPDF_Doc pdf)
HPDF_CreateExtGState=haru.HPDF_CreateExtGState
HPDF_CreateExtGState.restype=HPDF_ExtGState
#HPDF_STATUS HPDF_ExtGState_SetAlphaStroke (HPDF_ExtGState ext_gstate, HPDF_REAL value)
_HPDF_ExtGState_SetAlphaStroke=haru.HPDF_ExtGState_SetAlphaStroke
_HPDF_ExtGState_SetAlphaStroke.restype=HPDF_STATUS
def HPDF_ExtGState_SetAlphaStroke(
ext_gstate, #HPDF_ExtGState
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_ExtGState_SetAlphaStroke(
ext_gstate, #HPDF_ExtGState
value, #HPDF_REAL
)
#HPDF_STATUS HPDF_ExtGState_SetAlphaFill (HPDF_ExtGState ext_gstate, HPDF_REAL value)
_HPDF_ExtGState_SetAlphaFill=haru.HPDF_ExtGState_SetAlphaFill
_HPDF_ExtGState_SetAlphaFill.restype=HPDF_STATUS
def HPDF_ExtGState_SetAlphaFill(
ext_gstate, #HPDF_ExtGState
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_ExtGState_SetAlphaFill(
ext_gstate, #HPDF_ExtGState
value, #HPDF_REAL
)
#HPDF_STATUS HPDF_ExtGState_SetBlendMode (HPDF_ExtGState ext_gstate, HPDF_BlendMode mode)
HPDF_ExtGState_SetBlendMode=haru.HPDF_ExtGState_SetBlendMode
HPDF_ExtGState_SetBlendMode.restype=HPDF_STATUS
#--------------------------------------------------------------------------
#--------------------------------------------------------------------------
#HPDF_REAL HPDF_Page_TextWidth (HPDF_Page page, const char *text)
_HPDF_Page_TextWidth=haru.HPDF_Page_TextWidth
_HPDF_Page_TextWidth.restype=HPDF_REAL
def HPDF_Page_TextWidth(
page, #HPDF_Page
text, #c_char_p
):
if type(text) in (types.ListType, types.TupleType):
if type(text[-1]) != types.StringType:
text=[chr(i) for i in text]
text=''.join(text)
return _HPDF_Page_TextWidth(
page, #HPDF_Page
text, #c_char_p
)
#HPDF_UINT HPDF_Page_MeasureText (HPDF_Page page, const char *text, HPDF_REAL width, HPDF_BOOL wordwrap, HPDF_REAL *real_width)
_HPDF_Page_MeasureText=haru.HPDF_Page_MeasureText
_HPDF_Page_MeasureText.restype=HPDF_UINT
def HPDF_Page_MeasureText(
page, #HPDF_Page
text, #c_char_p
width, #HPDF_REAL
wordwrap, #HPDF_BOOL
real_width, #POINTER(HPDF_REAL)
):
width=HPDF_REAL(width)
real_width=HPDF_REAL(real_width)
return _HPDF_Page_MeasureText(
page, #HPDF_Page
text, #c_char_p
width, #HPDF_REAL
wordwrap, #HPDF_BOOL
real_width, #POINTER(HPDF_REAL)
)
#HPDF_REAL
#HPDF_Page_GetWidth (HPDF_Page page);
HPDF_Page_GetWidth=haru.HPDF_Page_GetWidth
HPDF_Page_GetWidth.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetHeight (HPDF_Page page)
HPDF_Page_GetHeight=haru.HPDF_Page_GetHeight
HPDF_Page_GetHeight.restype=HPDF_REAL
#HPDF_UINT16 HPDF_Page_GetGMode (HPDF_Page page)
HPDF_Page_GetGMode=haru.HPDF_Page_GetGMode
HPDF_Page_GetGMode.restype=HPDF_UINT16
#HPDF_Point HPDF_Page_GetCurrentPos (HPDF_Page page)
HPDF_Page_GetCurrentPos=haru.HPDF_Page_GetCurrentPos
HPDF_Page_GetCurrentPos.restype=HPDF_Point
#HPDF_STATUS HPDF_Page_GetCurrentPos2 (HPDF_Page page, HPDF_Point *pos)
_HPDF_Page_GetCurrentPos2=haru.HPDF_Page_GetCurrentPos2
_HPDF_Page_GetCurrentPos2.restype=HPDF_STATUS
def HPDF_Page_GetCurrentPos2(
page, #HPDF_Page
pos=None, #POINTER(HPDF_Point)
):
pos=HPDF_Point()
ret= _HPDF_Page_GetCurrentPos2(
page, #HPDF_Page
pos, #POINTER(HPDF_Point)
)
return ret, pos.x, pos.y
#HPDF_Point HPDF_Page_GetCurrentTextPos (HPDF_Page page)
HPDF_Page_GetCurrentTextPos=haru.HPDF_Page_GetCurrentTextPos
HPDF_Page_GetCurrentTextPos.restype=HPDF_Point
#HPDF_STATUS HPDF_Page_GetCurrentTextPos2 (HPDF_Page page, HPDF_Point *pos)
_HPDF_Page_GetCurrentTextPos2=haru.HPDF_Page_GetCurrentTextPos2
_HPDF_Page_GetCurrentTextPos2.restype=HPDF_STATUS
def HPDF_Page_GetCurrentTextPos2(
page, #HPDF_Page
pos=None, #POINTER(HPDF_Point)
):
pos=HPDF_Point()
ret= _HPDF_Page_GetCurrentTextPos2(
page, #HPDF_Page
pos, #POINTER(HPDF_Point)
)
return ret, pos.x, pos.y
#HPDF_Font HPDF_Page_GetCurrentFont (HPDF_Page page)
HPDF_Page_GetCurrentFont=haru.HPDF_Page_GetCurrentFont
HPDF_Page_GetCurrentFont.restype=HPDF_Font
#HPDF_REAL HPDF_Page_GetCurrentFontSize (HPDF_Page page)
HPDF_Page_GetCurrentFontSize=haru.HPDF_Page_GetCurrentFontSize
HPDF_Page_GetCurrentFontSize.restype=HPDF_REAL
#HPDF_TransMatrix HPDF_Page_GetTransMatrix (HPDF_Page page)
HPDF_Page_GetTransMatrix=haru.HPDF_Page_GetTransMatrix
HPDF_Page_GetTransMatrix.restype=HPDF_TransMatrix
#HPDF_REAL HPDF_Page_GetLineWidth (HPDF_Page page)
HPDF_Page_GetLineWidth=haru.HPDF_Page_GetLineWidth
HPDF_Page_GetLineWidth.restype=HPDF_REAL
#HPDF_LineCap HPDF_Page_GetLineCap (HPDF_Page page)
HPDF_Page_GetLineCap=haru.HPDF_Page_GetLineCap
HPDF_Page_GetLineCap.restype=HPDF_LineCap
#HPDF_LineJoin HPDF_Page_GetLineJoin (HPDF_Page page)
HPDF_Page_GetLineJoin=haru.HPDF_Page_GetLineJoin
HPDF_Page_GetLineJoin.restype=HPDF_LineJoin
#HPDF_REAL HPDF_Page_GetMiterLimit (HPDF_Page page)
HPDF_Page_GetMiterLimit=haru.HPDF_Page_GetMiterLimit
HPDF_Page_GetMiterLimit.restype=HPDF_REAL
#HPDF_DashMode HPDF_Page_GetDash (HPDF_Page page)
HPDF_Page_GetDash=haru.HPDF_Page_GetDash
HPDF_Page_GetDash.restype=HPDF_DashMode
#HPDF_REAL HPDF_Page_GetFlat (HPDF_Page page)
HPDF_Page_GetFlat=haru.HPDF_Page_GetFlat
HPDF_Page_GetFlat.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetCharSpace (HPDF_Page page)
HPDF_Page_GetCharSpace=haru.HPDF_Page_GetCharSpace
HPDF_Page_GetCharSpace.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetWordSpace (HPDF_Page page)
HPDF_Page_GetWordSpace=haru.HPDF_Page_GetWordSpace
HPDF_Page_GetWordSpace.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetHorizontalScalling (HPDF_Page page)
HPDF_Page_GetHorizontalScalling=haru.HPDF_Page_GetHorizontalScalling
HPDF_Page_GetHorizontalScalling.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetTextLeading (HPDF_Page page)
HPDF_Page_GetTextLeading=haru.HPDF_Page_GetTextLeading
HPDF_Page_GetTextLeading.restype=HPDF_REAL
#HPDF_TextRenderingMode HPDF_Page_GetTextRenderingMode (HPDF_Page page)
HPDF_Page_GetTextRenderingMode=haru.HPDF_Page_GetTextRenderingMode
HPDF_Page_GetTextRenderingMode.restype=HPDF_TextRenderingMode
# This function is obsolete. Use HPDF_Page_GetTextRise.
#HPDF_REAL HPDF_Page_GetTextRaise (HPDF_Page page)
HPDF_Page_GetTextRaise=haru.HPDF_Page_GetTextRaise
HPDF_Page_GetTextRaise.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetTextRise (HPDF_Page page)
HPDF_Page_GetTextRise=haru.HPDF_Page_GetTextRise
HPDF_Page_GetTextRise.restype=HPDF_REAL
#HPDF_RGBColor HPDF_Page_GetRGBFill (HPDF_Page page)
HPDF_Page_GetRGBFill=haru.HPDF_Page_GetRGBFill
HPDF_Page_GetRGBFill.restype=HPDF_RGBColor
#HPDF_RGBColor HPDF_Page_GetRGBStroke (HPDF_Page page)
HPDF_Page_GetRGBStroke=haru.HPDF_Page_GetRGBStroke
HPDF_Page_GetRGBStroke.restype=HPDF_RGBColor
#HPDF_CMYKColor HPDF_Page_GetCMYKFill (HPDF_Page page)
HPDF_Page_GetCMYKFill=haru.HPDF_Page_GetCMYKFill
HPDF_Page_GetCMYKFill.restype=HPDF_CMYKColor
#HPDF_CMYKColor HPDF_Page_GetCMYKStroke (HPDF_Page page)
HPDF_Page_GetCMYKStroke=haru.HPDF_Page_GetCMYKStroke
HPDF_Page_GetCMYKStroke.restype=HPDF_CMYKColor
#HPDF_REAL HPDF_Page_GetGrayFill (HPDF_Page page)
HPDF_Page_GetGrayFill=haru.HPDF_Page_GetGrayFill
HPDF_Page_GetGrayFill.restype=HPDF_REAL
#HPDF_REAL HPDF_Page_GetGrayStroke (HPDF_Page page)
HPDF_Page_GetGrayStroke=haru.HPDF_Page_GetGrayStroke
HPDF_Page_GetGrayStroke.restype=HPDF_REAL
#HPDF_ColorSpace HPDF_Page_GetStrokingColorSpace (HPDF_Page page)
HPDF_Page_GetStrokingColorSpace=haru.HPDF_Page_GetStrokingColorSpace
HPDF_Page_GetStrokingColorSpace.restype=HPDF_ColorSpace
#HPDF_ColorSpace HPDF_Page_GetFillingColorSpace (HPDF_Page page)
HPDF_Page_GetFillingColorSpace=haru.HPDF_Page_GetFillingColorSpace
HPDF_Page_GetFillingColorSpace.restype=HPDF_ColorSpace
#HPDF_TransMatrix HPDF_Page_GetTextMatrix (HPDF_Page page)
HPDF_Page_GetTextMatrix=haru.HPDF_Page_GetTextMatrix
HPDF_Page_GetTextMatrix.restype=HPDF_TransMatrix
#HPDF_UINT HPDF_Page_GetGStateDepth (HPDF_Page page)
HPDF_Page_GetGStateDepth=haru.HPDF_Page_GetGStateDepth
HPDF_Page_GetGStateDepth.restype=HPDF_UINT
#--------------------------------------------------------------------------
#----- GRAPHICS OPERATORS -------------------------------------------------
#--- General graphics state ---------------------------------------------
# w
#HPDF_STATUS HPDF_Page_SetLineWidth (HPDF_Page page, HPDF_REAL line_width)
_HPDF_Page_SetLineWidth=haru.HPDF_Page_SetLineWidth
_HPDF_Page_SetLineWidth.restype=HPDF_STATUS
def HPDF_Page_SetLineWidth(
page, #HPDF_Page
line_width, #HPDF_REAL
):
line_width=HPDF_REAL(line_width)
return _HPDF_Page_SetLineWidth(
page, #HPDF_Page
line_width, #HPDF_REAL
)
# J
#HPDF_STATUS HPDF_Page_SetLineCap (HPDF_Page page, HPDF_LineCap line_cap)
HPDF_Page_SetLineCap=haru.HPDF_Page_SetLineCap
HPDF_Page_SetLineCap.restype=HPDF_STATUS
# j
#HPDF_STATUS HPDF_Page_SetLineJoin (HPDF_Page page, HPDF_LineJoin line_join)
HPDF_Page_SetLineJoin=haru.HPDF_Page_SetLineJoin
HPDF_Page_SetLineJoin.restype=HPDF_STATUS
# M
#HPDF_STATUS HPDF_Page_SetMiterLimit (HPDF_Page page, HPDF_REAL miter_limit)
_HPDF_Page_SetMiterLimit=haru.HPDF_Page_SetMiterLimit
_HPDF_Page_SetMiterLimit.restype=HPDF_STATUS
def HPDF_Page_SetMiterLimit(
page, #HPDF_Page
miter_limit, #HPDF_REAL
):
miter_limit=HPDF_REAL(miter_limit)
return _HPDF_Page_SetMiterLimit(
page, #HPDF_Page
miter_limit, #HPDF_REAL
)
# d
#HPDF_STATUS HPDF_Page_SetDash (HPDF_Page page, const HPDF_UINT16 *dash_ptn, HPDF_UINT num_param, HPDF_UINT phase)
_HPDF_Page_SetDash=haru.HPDF_Page_SetDash
_HPDF_Page_SetDash.restype=HPDF_STATUS
def HPDF_Page_SetDash(
page, #HPDF_Page
dash_ptn, #POINTER(HPDF_UINT16)
num_param, #HPDF_UINT
phase, #HPDF_UINT
):
if type(dash_ptn) in (types.ListType, types.TupleType):
num_param=len(dash_ptn)
dash_ptn=pointer((HPDF_UINT16*num_param)(*dash_ptn))
return _HPDF_Page_SetDash(
page, #HPDF_Page
dash_ptn, #POINTER(HPDF_UINT16)
num_param, #HPDF_UINT
phase, #HPDF_UINT
)
# ri --not implemented yet
# i
#HPDF_STATUS HPDF_Page_SetFlat (HPDF_Page page, HPDF_REAL flatness)
_HPDF_Page_SetFlat=haru.HPDF_Page_SetFlat
_HPDF_Page_SetFlat.restype=HPDF_STATUS
def HPDF_Page_SetFlat(
page, #HPDF_Page
flatness, #HPDF_REAL
):
flatness=HPDF_REAL(flatness)
return _HPDF_Page_SetFlat(
page, #HPDF_Page
flatness, #HPDF_REAL
)
# gs
#HPDF_STATUS HPDF_Page_SetExtGState (HPDF_Page page, HPDF_ExtGState ext_gstate)
HPDF_Page_SetExtGState=haru.HPDF_Page_SetExtGState
HPDF_Page_SetExtGState.restype=HPDF_STATUS
#--- Special graphic state operator --------------------------------------
# q
#HPDF_STATUS HPDF_Page_GSave (HPDF_Page page)
HPDF_Page_GSave=haru.HPDF_Page_GSave
HPDF_Page_GSave.restype=HPDF_STATUS
# Q
#HPDF_STATUS HPDF_Page_GRestore (HPDF_Page page)
HPDF_Page_GRestore=haru.HPDF_Page_GRestore
HPDF_Page_GRestore.restype=HPDF_STATUS
# cm
#HPDF_STATUS HPDF_Page_Concat (HPDF_Page page, HPDF_REAL a, HPDF_REAL b, HPDF_REAL c, HPDF_REAL d, HPDF_REAL x, HPDF_REAL y)
_HPDF_Page_Concat=haru.HPDF_Page_Concat
_HPDF_Page_Concat.restype=HPDF_STATUS
def HPDF_Page_Concat(
page, #HPDF_Page
a, #HPDF_REAL
b, #HPDF_REAL
c, #HPDF_REAL
d, #HPDF_REAL
x, #HPDF_REAL
y, #HPDF_REAL
):
a=HPDF_REAL(a)
b=HPDF_REAL(b)
c=HPDF_REAL(c)
d=HPDF_REAL(d)
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_Concat(
page, #HPDF_Page
a, #HPDF_REAL
b, #HPDF_REAL
c, #HPDF_REAL
d, #HPDF_REAL
x, #HPDF_REAL
y, #HPDF_REAL
)
#--- Path construction operator ------------------------------------------
# m
#HPDF_STATUS HPDF_Page_MoveTo (HPDF_Page page, HPDF_REAL x, HPDF_REAL y)
_HPDF_Page_MoveTo=haru.HPDF_Page_MoveTo
_HPDF_Page_MoveTo.restype=HPDF_STATUS
def HPDF_Page_MoveTo(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_MoveTo(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
)
# l
#HPDF_STATUS HPDF_Page_LineTo (HPDF_Page page, HPDF_REAL x, HPDF_REAL y)
_HPDF_Page_LineTo=haru.HPDF_Page_LineTo
_HPDF_Page_LineTo.restype=HPDF_STATUS
def HPDF_Page_LineTo(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_LineTo(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
)
# c
#HPDF_STATUS HPDF_Page_CurveTo (HPDF_Page page, HPDF_REAL x1, HPDF_REAL y1, HPDF_REAL x2, HPDF_REAL y2, HPDF_REAL x3, HPDF_REAL y3)
_HPDF_Page_CurveTo=haru.HPDF_Page_CurveTo
_HPDF_Page_CurveTo.restype=HPDF_STATUS
def HPDF_Page_CurveTo(
page, #HPDF_Page
x1, #HPDF_REAL
y1, #HPDF_REAL
x2, #HPDF_REAL
y2, #HPDF_REAL
x3, #HPDF_REAL
y3, #HPDF_REAL
):
x1=HPDF_REAL(x1)
y1=HPDF_REAL(y1)
x2=HPDF_REAL(x2)
y2=HPDF_REAL(y2)
x3=HPDF_REAL(x3)
y3=HPDF_REAL(y3)
return _HPDF_Page_CurveTo(
page, #HPDF_Page
x1, #HPDF_REAL
y1, #HPDF_REAL
x2, #HPDF_REAL
y2, #HPDF_REAL
x3, #HPDF_REAL
y3, #HPDF_REAL
)
# v
#HPDF_STATUS HPDF_Page_CurveTo2 (HPDF_Page page, HPDF_REAL x2, HPDF_REAL y2, HPDF_REAL x3, HPDF_REAL y3)
_HPDF_Page_CurveTo2=haru.HPDF_Page_CurveTo2
_HPDF_Page_CurveTo2.restype=HPDF_STATUS
def HPDF_Page_CurveTo2(
page, #HPDF_Page
x2, #HPDF_REAL
y2, #HPDF_REAL
x3, #HPDF_REAL
y3, #HPDF_REAL
):
x2=HPDF_REAL(x2)
y2=HPDF_REAL(y2)
x3=HPDF_REAL(x3)
y3=HPDF_REAL(y3)
return _HPDF_Page_CurveTo2(
page, #HPDF_Page
x2, #HPDF_REAL
y2, #HPDF_REAL
x3, #HPDF_REAL
y3, #HPDF_REAL
)
# y
#HPDF_STATUS HPDF_Page_CurveTo3 (HPDF_Page page, HPDF_REAL x1, HPDF_REAL y1, HPDF_REAL x3, HPDF_REAL y3)
_HPDF_Page_CurveTo3=haru.HPDF_Page_CurveTo3
_HPDF_Page_CurveTo3.restype=HPDF_STATUS
def HPDF_Page_CurveTo3(
page, #HPDF_Page
x1, #HPDF_REAL
y1, #HPDF_REAL
x3, #HPDF_REAL
y3, #HPDF_REAL
):
x1=HPDF_REAL(x1)
y1=HPDF_REAL(y1)
x3=HPDF_REAL(x3)
y3=HPDF_REAL(y3)
return _HPDF_Page_CurveTo3(
page, #HPDF_Page
x1, #HPDF_REAL
y1, #HPDF_REAL
x3, #HPDF_REAL
y3, #HPDF_REAL
)
# h
#HPDF_STATUS HPDF_Page_ClosePath (HPDF_Page page)
HPDF_Page_ClosePath=haru.HPDF_Page_ClosePath
HPDF_Page_ClosePath.restype=HPDF_STATUS
# re
#HPDF_STATUS HPDF_Page_Rectangle (HPDF_Page page, HPDF_REAL x, HPDF_REAL y, HPDF_REAL width, HPDF_REAL height)
_HPDF_Page_Rectangle=haru.HPDF_Page_Rectangle
_HPDF_Page_Rectangle.restype=HPDF_STATUS
def HPDF_Page_Rectangle(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
width, #HPDF_REAL
height, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
width=HPDF_REAL(width)
height=HPDF_REAL(height)
return _HPDF_Page_Rectangle(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
width, #HPDF_REAL
height, #HPDF_REAL
)
#--- Path painting operator ---------------------------------------------
# S
#HPDF_STATUS HPDF_Page_Stroke (HPDF_Page page)
_HPDF_Page_Stroke=haru.HPDF_Page_Stroke
_HPDF_Page_Stroke.restype=HPDF_STATUS
def HPDF_Page_Stroke(
page, #HPDF_Page
):
return _HPDF_Page_Stroke(
page, #HPDF_Page
)
# s
#HPDF_STATUS HPDF_Page_ClosePathStroke (HPDF_Page page)
HPDF_Page_ClosePathStroke=haru.HPDF_Page_ClosePathStroke
HPDF_Page_ClosePathStroke.restype=HPDF_STATUS
# f
#HPDF_STATUS HPDF_Page_Fill (HPDF_Page page)
HPDF_Page_Fill=haru.HPDF_Page_Fill
HPDF_Page_Fill.restype=HPDF_STATUS
# f*
#HPDF_STATUS HPDF_Page_Eofill (HPDF_Page page)
HPDF_Page_Eofill=haru.HPDF_Page_Eofill
HPDF_Page_Eofill.restype=HPDF_STATUS
# B
#HPDF_STATUS HPDF_Page_FillStroke (HPDF_Page page)
HPDF_Page_FillStroke=haru.HPDF_Page_FillStroke
HPDF_Page_FillStroke.restype=HPDF_STATUS
# B*
#HPDF_STATUS HPDF_Page_EofillStroke (HPDF_Page page)
HPDF_Page_EofillStroke=haru.HPDF_Page_EofillStroke
HPDF_Page_EofillStroke.restype=HPDF_STATUS
# b
#HPDF_STATUS HPDF_Page_ClosePathFillStroke (HPDF_Page page)
HPDF_Page_ClosePathFillStroke=haru.HPDF_Page_ClosePathFillStroke
HPDF_Page_ClosePathFillStroke.restype=HPDF_STATUS
# b*
#HPDF_STATUS HPDF_Page_ClosePathEofillStroke (HPDF_Page page)
HPDF_Page_ClosePathEofillStroke=haru.HPDF_Page_ClosePathEofillStroke
HPDF_Page_ClosePathEofillStroke.restype=HPDF_STATUS
# n
#HPDF_STATUS HPDF_Page_EndPath (HPDF_Page page)
HPDF_Page_EndPath=haru.HPDF_Page_EndPath
HPDF_Page_EndPath.restype=HPDF_STATUS
#--- Clipping paths operator --------------------------------------------
# W
#HPDF_STATUS HPDF_Page_Clip (HPDF_Page page)
HPDF_Page_Clip=haru.HPDF_Page_Clip
HPDF_Page_Clip.restype=HPDF_STATUS
# W*
#HPDF_STATUS HPDF_Page_Eoclip (HPDF_Page page)
HPDF_Page_Eoclip=haru.HPDF_Page_Eoclip
HPDF_Page_Eoclip.restype=HPDF_STATUS
#--- Text object operator -----------------------------------------------
# BT
#HPDF_STATUS HPDF_Page_BeginText (HPDF_Page page)
HPDF_Page_BeginText=haru.HPDF_Page_BeginText
HPDF_Page_BeginText.restype=HPDF_STATUS
# ET
#HPDF_STATUS HPDF_Page_EndText (HPDF_Page page)
HPDF_Page_EndText=haru.HPDF_Page_EndText
HPDF_Page_EndText.restype=HPDF_STATUS
#--- Text state ---------------------------------------------------------
# Tc
#HPDF_STATUS HPDF_Page_SetCharSpace (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetCharSpace=haru.HPDF_Page_SetCharSpace
_HPDF_Page_SetCharSpace.restype=HPDF_STATUS
def HPDF_Page_SetCharSpace(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetCharSpace(
page, #HPDF_Page
value, #HPDF_REAL
)
# Tw
#HPDF_STATUS HPDF_Page_SetWordSpace (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetWordSpace=haru.HPDF_Page_SetWordSpace
_HPDF_Page_SetWordSpace.restype=HPDF_STATUS
def HPDF_Page_SetWordSpace(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetWordSpace(
page, #HPDF_Page
value, #HPDF_REAL
)
# Tz
#HPDF_STATUS HPDF_Page_SetHorizontalScalling (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetHorizontalScalling=haru.HPDF_Page_SetHorizontalScalling
_HPDF_Page_SetHorizontalScalling.restype=HPDF_STATUS
def HPDF_Page_SetHorizontalScalling(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetHorizontalScalling(
page, #HPDF_Page
value, #HPDF_REAL
)
# TL
#HPDF_STATUS HPDF_Page_SetTextLeading (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetTextLeading=haru.HPDF_Page_SetTextLeading
_HPDF_Page_SetTextLeading.restype=HPDF_STATUS
def HPDF_Page_SetTextLeading(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetTextLeading(
page, #HPDF_Page
value, #HPDF_REAL
)
# Tf
#HPDF_STATUS HPDF_Page_SetFontAndSize (HPDF_Page page, HPDF_Font font, HPDF_REAL size)
_HPDF_Page_SetFontAndSize=haru.HPDF_Page_SetFontAndSize
_HPDF_Page_SetFontAndSize.restype=HPDF_STATUS
def HPDF_Page_SetFontAndSize(
page, #HPDF_Page
font, #HPDF_Font
size, #HPDF_REAL
):
size=HPDF_REAL(size)
return _HPDF_Page_SetFontAndSize(
page, #HPDF_Page
font, #HPDF_Font
size, #HPDF_REAL
)
# Tr
#HPDF_STATUS HPDF_Page_SetTextRenderingMode (HPDF_Page page, HPDF_TextRenderingMode mode)
HPDF_Page_SetTextRenderingMode=haru.HPDF_Page_SetTextRenderingMode
HPDF_Page_SetTextRenderingMode.restype=HPDF_STATUS
# Ts
#HPDF_STATUS HPDF_Page_SetTextRise (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetTextRise=haru.HPDF_Page_SetTextRise
_HPDF_Page_SetTextRise.restype=HPDF_STATUS
def HPDF_Page_SetTextRise(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetTextRise(
page, #HPDF_Page
value, #HPDF_REAL
)
# This function is obsolete. Use HPDF_Page_SetTextRise.
#HPDF_STATUS HPDF_Page_SetTextRaise (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetTextRaise=haru.HPDF_Page_SetTextRaise
_HPDF_Page_SetTextRaise.restype=HPDF_STATUS
def HPDF_Page_SetTextRaise(
page, #HPDF_Page
value, #HPDF_REAL
):
value=HPDF_REAL(value)
return _HPDF_Page_SetTextRaise(
page, #HPDF_Page
value, #HPDF_REAL
)
#--- Text positioning ---------------------------------------------------
# Td
#HPDF_STATUS HPDF_Page_MoveTextPos (HPDF_Page page, HPDF_REAL x, HPDF_REAL y)
_HPDF_Page_MoveTextPos=haru.HPDF_Page_MoveTextPos
_HPDF_Page_MoveTextPos.restype=HPDF_STATUS
def HPDF_Page_MoveTextPos(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_MoveTextPos(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
)
# TD
#HPDF_STATUS HPDF_Page_MoveTextPos2 (HPDF_Page page, HPDF_REAL x, HPDF_REAL y)
_HPDF_Page_MoveTextPos2=haru.HPDF_Page_MoveTextPos2
_HPDF_Page_MoveTextPos2.restype=HPDF_STATUS
def HPDF_Page_MoveTextPos2(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_MoveTextPos2(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
)
# Tm
#HPDF_STATUS HPDF_Page_SetTextMatrix (HPDF_Page page, HPDF_REAL a, HPDF_REAL b, HPDF_REAL c, HPDF_REAL d, HPDF_REAL x, HPDF_REAL y)
_HPDF_Page_SetTextMatrix=haru.HPDF_Page_SetTextMatrix
_HPDF_Page_SetTextMatrix.restype=HPDF_STATUS
def HPDF_Page_SetTextMatrix(
page, #HPDF_Page
a, #HPDF_REAL
b, #HPDF_REAL
c, #HPDF_REAL
d, #HPDF_REAL
x, #HPDF_REAL
y, #HPDF_REAL
):
a=HPDF_REAL(a)
b=HPDF_REAL(b)
c=HPDF_REAL(c)
d=HPDF_REAL(d)
x=HPDF_REAL(x)
y=HPDF_REAL(y)
return _HPDF_Page_SetTextMatrix(
page, #HPDF_Page
a, #HPDF_REAL
b, #HPDF_REAL
c, #HPDF_REAL
d, #HPDF_REAL
x, #HPDF_REAL
y, #HPDF_REAL
)
# T*
#HPDF_STATUS HPDF_Page_MoveToNextLine (HPDF_Page page)
HPDF_Page_MoveToNextLine=haru.HPDF_Page_MoveToNextLine
HPDF_Page_MoveToNextLine.restype=HPDF_STATUS
#--- Text showing -------------------------------------------------------
# Tj
#HPDF_STATUS HPDF_Page_ShowText (HPDF_Page page, const char *text)
_HPDF_Page_ShowText=haru.HPDF_Page_ShowText
_HPDF_Page_ShowText.restype=HPDF_STATUS
def HPDF_Page_ShowText(page,
text
):
if type(text) in (types.ListType, types.TupleType):
if type(text[-1]) != types.StringType:
text=[chr(i) for i in text]
text=''.join(text)
return _HPDF_Page_ShowText(page,
text
)
# TJ
# '
#HPDF_STATUS HPDF_Page_ShowTextNextLine (HPDF_Page page, const char *text)
HPDF_Page_ShowTextNextLine=haru.HPDF_Page_ShowTextNextLine
HPDF_Page_ShowTextNextLine.restype=HPDF_STATUS
# "
#HPDF_STATUS HPDF_Page_ShowTextNextLineEx (HPDF_Page page, HPDF_REAL word_space, HPDF_REAL char_space, const char *text)
_HPDF_Page_ShowTextNextLineEx=haru.HPDF_Page_ShowTextNextLineEx
_HPDF_Page_ShowTextNextLineEx.restype=HPDF_STATUS
def HPDF_Page_ShowTextNextLineEx(
page, #HPDF_Page
word_space, #HPDF_REAL
char_space, #HPDF_REAL
text, #c_char_p
):
word_space=HPDF_REAL(word_space)
char_space=HPDF_REAL(char_space)
return _HPDF_Page_ShowTextNextLineEx(
page, #HPDF_Page
word_space, #HPDF_REAL
char_space, #HPDF_REAL
text, #c_char_p
)
#--- Color showing ------------------------------------------------------
# cs --not implemented yet
# CS --not implemented yet
# sc --not implemented yet
# scn --not implemented yet
# SC --not implemented yet
# SCN --not implemented yet
# g
#HPDF_STATUS HPDF_Page_SetGrayFill (HPDF_Page page, HPDF_REAL gray)
_HPDF_Page_SetGrayFill=haru.HPDF_Page_SetGrayFill
_HPDF_Page_SetGrayFill.restype=HPDF_STATUS
def HPDF_Page_SetGrayFill(
page, #HPDF_Page
gray, #HPDF_REAL
):
gray=HPDF_REAL(gray)
return _HPDF_Page_SetGrayFill(
page, #HPDF_Page
gray, #HPDF_REAL
)
# G
#HPDF_STATUS HPDF_Page_SetGrayStroke (HPDF_Page page, HPDF_REAL gray)
_HPDF_Page_SetGrayStroke=haru.HPDF_Page_SetGrayStroke
_HPDF_Page_SetGrayStroke.restype=HPDF_STATUS
def HPDF_Page_SetGrayStroke(
page, #HPDF_Page
gray, #HPDF_REAL
):
gray=HPDF_REAL(gray)
return _HPDF_Page_SetGrayStroke(
page, #HPDF_Page
gray, #HPDF_REAL
)
# rg
#HPDF_STATUS HPDF_Page_SetRGBFill (HPDF_Page page, HPDF_REAL r, HPDF_REAL g, HPDF_REAL b)
_HPDF_Page_SetRGBFill=haru.HPDF_Page_SetRGBFill
_HPDF_Page_SetRGBFill.restype=HPDF_STATUS
def HPDF_Page_SetRGBFill(
page, #HPDF_Page
r, #HPDF_REAL
g, #HPDF_REAL
b, #HPDF_REAL
):
r=HPDF_REAL(r)
g=HPDF_REAL(g)
b=HPDF_REAL(b)
return _HPDF_Page_SetRGBFill(
page, #HPDF_Page
r, #HPDF_REAL
g, #HPDF_REAL
b, #HPDF_REAL
)
# RG
#HPDF_STATUS HPDF_Page_SetRGBStroke (HPDF_Page page, HPDF_REAL r, HPDF_REAL g, HPDF_REAL b)
_HPDF_Page_SetRGBStroke=haru.HPDF_Page_SetRGBStroke
_HPDF_Page_SetRGBStroke.restype=HPDF_STATUS
def HPDF_Page_SetRGBStroke(
page, #HPDF_Page
r, #HPDF_REAL
g, #HPDF_REAL
b, #HPDF_REAL
):
r=HPDF_REAL(r)
g=HPDF_REAL(g)
b=HPDF_REAL(b)
return _HPDF_Page_SetRGBStroke(
page, #HPDF_Page
r, #HPDF_REAL
g, #HPDF_REAL
b, #HPDF_REAL
)
# k
#HPDF_STATUS HPDF_Page_SetCMYKFill (HPDF_Page page, HPDF_REAL c, HPDF_REAL m, HPDF_REAL y, HPDF_REAL k)
_HPDF_Page_SetCMYKFill=haru.HPDF_Page_SetCMYKFill
_HPDF_Page_SetCMYKFill.restype=HPDF_STATUS
def HPDF_Page_SetCMYKFill(
page, #HPDF_Page
c, #HPDF_REAL
m, #HPDF_REAL
y, #HPDF_REAL
k, #HPDF_REAL
):
c=HPDF_REAL(c)
m=HPDF_REAL(m)
y=HPDF_REAL(y)
k=HPDF_REAL(k)
return _HPDF_Page_SetCMYKFill(
page, #HPDF_Page
c, #HPDF_REAL
m, #HPDF_REAL
y, #HPDF_REAL
k, #HPDF_REAL
)
# K
#HPDF_STATUS HPDF_Page_SetCMYKStroke (HPDF_Page page, HPDF_REAL c, HPDF_REAL m, HPDF_REAL y, HPDF_REAL k)
_HPDF_Page_SetCMYKStroke=haru.HPDF_Page_SetCMYKStroke
_HPDF_Page_SetCMYKStroke.restype=HPDF_STATUS
def HPDF_Page_SetCMYKStroke(
page, #HPDF_Page
c, #HPDF_REAL
m, #HPDF_REAL
y, #HPDF_REAL
k, #HPDF_REAL
):
c=HPDF_REAL(c)
m=HPDF_REAL(m)
y=HPDF_REAL(y)
k=HPDF_REAL(k)
return _HPDF_Page_SetCMYKStroke(
page, #HPDF_Page
c, #HPDF_REAL
m, #HPDF_REAL
y, #HPDF_REAL
k, #HPDF_REAL
)
#--- Shading patterns ---------------------------------------------------
# sh --not implemented yet
#--- In-line images -----------------------------------------------------
# BI --not implemented yet
# ID --not implemented yet
# EI --not implemented yet
#--- XObjects -----------------------------------------------------------
# Do
#HPDF_STATUS HPDF_Page_ExecuteXObject (HPDF_Page page, HPDF_XObject obj)
HPDF_Page_ExecuteXObject=haru.HPDF_Page_ExecuteXObject
HPDF_Page_ExecuteXObject.restype=HPDF_STATUS
#--- Marked content -----------------------------------------------------
# BMC --not implemented yet
# BDC --not implemented yet
# EMC --not implemented yet
# MP --not implemented yet
# DP --not implemented yet
#--- Compatibility ------------------------------------------------------
# BX --not implemented yet
# EX --not implemented yet
#HPDF_STATUS HPDF_Page_DrawImage (HPDF_Page page, HPDF_Image image, HPDF_REAL x, HPDF_REAL y, HPDF_REAL width, HPDF_REAL height)
_HPDF_Page_DrawImage=haru.HPDF_Page_DrawImage
_HPDF_Page_DrawImage.restype=HPDF_STATUS
def HPDF_Page_DrawImage(
page, #HPDF_Page
image, #HPDF_Image
x, #HPDF_REAL
y, #HPDF_REAL
width, #HPDF_REAL
height, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
width=HPDF_REAL(width)
height=HPDF_REAL(height)
return _HPDF_Page_DrawImage(
page, #HPDF_Page
image, #HPDF_Image
x, #HPDF_REAL
y, #HPDF_REAL
width, #HPDF_REAL
height, #HPDF_REAL
)
<|fim▁hole|>def HPDF_Page_Circle(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
ray, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
ray=HPDF_REAL(ray)
return _HPDF_Page_Circle(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
ray, #HPDF_REAL
)
#HPDF_STATUS HPDF_Page_Ellipse (HPDF_Page page, HPDF_REAL x, HPDF_REAL y, HPDF_REAL xray, HPDF_REAL yray)
_HPDF_Page_Ellipse=haru.HPDF_Page_Ellipse
_HPDF_Page_Ellipse.restype=HPDF_STATUS
def HPDF_Page_Ellipse(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
xray, #HPDF_REAL
yray, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
xray=HPDF_REAL(xray)
yray=HPDF_REAL(yray)
return _HPDF_Page_Ellipse(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
xray, #HPDF_REAL
yray, #HPDF_REAL
)
#HPDF_STATUS HPDF_Page_Arc (HPDF_Page page, HPDF_REAL x, HPDF_REAL y, HPDF_REAL ray, HPDF_REAL ang1, HPDF_REAL ang2)
_HPDF_Page_Arc=haru.HPDF_Page_Arc
_HPDF_Page_Arc.restype=HPDF_STATUS
def HPDF_Page_Arc(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
ray, #HPDF_REAL
ang1, #HPDF_REAL
ang2, #HPDF_REAL
):
x=HPDF_REAL(x)
y=HPDF_REAL(y)
ray=HPDF_REAL(ray)
ang1=HPDF_REAL(ang1)
ang2=HPDF_REAL(ang2)
return _HPDF_Page_Arc(
page, #HPDF_Page
x, #HPDF_REAL
y, #HPDF_REAL
ray, #HPDF_REAL
ang1, #HPDF_REAL
ang2, #HPDF_REAL
)
#HPDF_STATUS HPDF_Page_TextOut (HPDF_Page page, HPDF_REAL xpos, HPDF_REAL ypos, const char *text)
_HPDF_Page_TextOut=haru.HPDF_Page_TextOut
_HPDF_Page_TextOut.restype=HPDF_STATUS
def HPDF_Page_TextOut(
page, #HPDF_Page
xpos, #HPDF_REAL
ypos, #HPDF_REAL
text, #c_char_p
):
xpos=HPDF_REAL(xpos)
ypos=HPDF_REAL(ypos)
if type(text) in (types.ListType, types.TupleType):
if type(text[-1]) != types.StringType:
text=[chr(i) for i in text]
text=''.join(text)
return _HPDF_Page_TextOut(
page, #HPDF_Page
xpos, #HPDF_REAL
ypos, #HPDF_REAL
text, #c_char_p
)
#HPDF_STATUS HPDF_Page_TextRect (HPDF_Page page, HPDF_REAL left, HPDF_REAL top, HPDF_REAL right, HPDF_REAL bottom, const char *text, HPDF_TextAlignment align, HPDF_UINT *len)
#???
_HPDF_Page_TextRect=haru.HPDF_Page_TextRect
_HPDF_Page_TextRect.restype=HPDF_STATUS
def HPDF_Page_TextRect(
page, #HPDF_Page
left, #HPDF_REAL
top, #HPDF_REAL
right, #HPDF_REAL
bottom, #HPDF_REAL
text, #c_char_p
align, #HPDF_TextAlignment
length, #POINTER(HPDF_UINT)
):
left=HPDF_REAL(left)
top=HPDF_REAL(top)
right=HPDF_REAL(right)
bottom=HPDF_REAL(bottom)
if type(length) in (types.ListType, types.TupleType):
size=len(length)
length=pointer((HPDF_UINT*size)(*length))
return _HPDF_Page_TextRect(
page, #HPDF_Page
left, #HPDF_REAL
top, #HPDF_REAL
right, #HPDF_REAL
bottom, #HPDF_REAL
text, #c_char_p
align, #HPDF_TextAlignment
length, #POINTER(HPDF_UINT)
)
#HPDF_STATUS HPDF_Page_SetSlideShow (HPDF_Page page, HPDF_TransitionStyle type, HPDF_REAL disp_time, HPDF_REAL trans_time)
_HPDF_Page_SetSlideShow=haru.HPDF_Page_SetSlideShow
_HPDF_Page_SetSlideShow.restype=HPDF_STATUS
def HPDF_Page_SetSlideShow(
page, #HPDF_Page
tType, #HPDF_TransitionStyle
disp_time, #HPDF_REAL
trans_time, #HPDF_REAL
):
disp_time=HPDF_REAL(disp_time)
trans_time=HPDF_REAL(trans_time)
return _HPDF_Page_SetSlideShow(
page, #HPDF_Page
tType, #HPDF_TransitionStyle
disp_time, #HPDF_REAL
trans_time, #HPDF_REAL
)
NULL=0
HPDF_NOPNGLIB=False<|fim▁end|> |
#HPDF_STATUS HPDF_Page_Circle (HPDF_Page page, HPDF_REAL x, HPDF_REAL y, HPDF_REAL ray)
_HPDF_Page_Circle=haru.HPDF_Page_Circle
_HPDF_Page_Circle.restype=HPDF_STATUS
|
<|file_name|>LabelDetail.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
export interface LabelDetailProps {
[key: string]: any;
/** An element type to render as (string or function). */
as?: any;
/** Primary content. */
children?: React.ReactNode;
<|fim▁hole|> /** Additional classes. */
className?: string;
/** Shorthand for primary content. */
content?: React.ReactNode;
}
declare const LabelDetail: React.StatelessComponent<LabelDetailProps>;
export default LabelDetail;<|fim▁end|> | |
<|file_name|>vmprof_profiler.py<|end_file_name|><|fim▁begin|>import os
import shutil
import tempfile
import vmprof
import prof_six as six
from _prof_imports import TreeStats, CallTreeStat
class VmProfProfile(object):
""" Wrapper class that represents VmProf Python profiling backend with API matching
the cProfile.
"""
def __init__(self):
self.stats = None
self.basepath = None
self.file = None
self.is_enabled = False
def runcall(self, func, *args, **kw):
self.enable()
try:
return func(*args, **kw)
finally:
self.disable()
def enable(self):
if not self.is_enabled:
if not os.path.exists(self.basepath):
os.makedirs(self.basepath)
self.file = tempfile.NamedTemporaryFile(delete=False, dir=self.basepath)
try:
vmprof.enable(self.file.fileno(), lines=True)
except:
vmprof.enable(self.file.fileno())
self.is_enabled = True
def disable(self):
if self.is_enabled:
vmprof.disable()
self.file.close()
self.is_enabled = False
def create_stats(self):
return None
def getstats(self):
self.create_stats()
return self.stats
def dump_stats(self, file):
shutil.copyfile(self.file.name, file)
def _walk_tree(self, parent, node, callback):
tree = callback(parent, node)
for c in six.itervalues(node.children):
self._walk_tree(node, c, callback)
return tree
def tree_stats_to_response(self, filename, response):
tree_stats_to_response(filename, response)
def snapshot_extension(self):
return '.prof'
def _walk_tree(parent, node, callback):
if node is None:
return None
tree = callback(parent, node)
for c in six.itervalues(node.children):
_walk_tree(tree, c, callback)
return tree
def tree_stats_to_response(filename, response):
stats = vmprof.read_profile(filename)
response.tree_stats = TreeStats()
response.tree_stats.sampling_interval = vmprof.DEFAULT_PERIOD
try:
tree = stats.get_tree()
except vmprof.stats.EmptyProfileFile:
tree = None
def convert(parent, node):
tstats = CallTreeStat()
tstats.name = node.name
tstats.count = node.count
tstats.children = []
tstats.line_count = getattr(node, 'lines', {})
if parent is not None:
if parent.children is None:
parent.children = []
parent.children.append(tstats)<|fim▁hole|> response.tree_stats.call_tree = _walk_tree(None, tree, convert)<|fim▁end|> |
return tstats
|
<|file_name|>mdl-layout.component.ts<|end_file_name|><|fim▁begin|>import {
AfterContentInit,
Component,
ContentChild,
ContentChildren,
ElementRef,
EventEmitter,
Inject,
Injectable,
InjectionToken,
Input,
NgZone,
OnChanges,
OnDestroy,
Optional,
Output,
QueryList,
Renderer2,
SimpleChanges,
ViewEncapsulation,
} from "@angular/core";
import { EventManager } from "@angular/platform-browser";
import { MdlLayoutHeaderComponent } from "./mdl-layout-header.component";
import { MdlLayoutDrawerComponent } from "./mdl-layout-drawer.component";
import { MdlLayoutContentComponent } from "./mdl-layout-content.component";
import { BehaviorSubject, Observable, Subscription } from "rxjs";
import { toBoolean } from "../common/boolean-property";
import { toNumber } from "../common/number.property";
import { MdlError } from "../common/mdl-error";
import { MdlLayoutMediatorService } from "./mdl-layout-mediator.service";
import { MdlLayoutTabPanelComponent } from "./mdl-layout-tab-panel.component";
const ESCAPE = 27;
const STANDARD = "standard";
const WATERFALL = "waterfall";
const SCROLL = "scroll";
/**
* The LAYOUT_SCREEN_SIZE_THRESHOLD can be changed at the root module. Just provide a value for this InjectionToken:
*
* providers: [
* {provide:LAYOUT_SCREEN_SIZE_THRESHOLD, useValue: 768 }
* ]
*
* you also need to change the scss variable to the same value: $layout-screen-size-threshold: 768px.
*
* It should be clear that this can only be used if you are using the scss and not the pre compiled css from getmdl.io.
*
*/
export const LAYOUT_SCREEN_SIZE_THRESHOLD = new InjectionToken<number>(
"layoutScreenSizeThreshold"
);
export class MdLUnsupportedLayoutTypeError extends MdlError {
constructor(type: string) {
/* istanbul ignore next */
super(
`Layout type "${type}" isn't supported by mdl-layout (allowed: standard, waterfall, scroll).`
);
}
}
@Injectable({
providedIn: "root",
})
export class MdlScreenSizeService {
private sizesSubject: BehaviorSubject<boolean> = new BehaviorSubject(false);
private windowMediaQueryListener: () => void;
constructor(
ngZone: NgZone,
@Optional()
@Inject(LAYOUT_SCREEN_SIZE_THRESHOLD)
public layoutScreenSizeThreshold: number
) {
// if no value is injected the default size wil be used. same as $layout-screen-size-threshold in scss
if (!this.layoutScreenSizeThreshold) {
this.layoutScreenSizeThreshold = 1024;
}
// do not try to access the window object if rendered on the server
if (typeof window === "object" && "matchMedia" in window) {
const query: MediaQueryList = window.matchMedia(
`(max-width: ${this.layoutScreenSizeThreshold}px)`
);
const queryListener = () => {
ngZone.run(() => {
this.sizesSubject.next(query.matches);
});
};
// - addEventListener not working in Safari
// eslint-disable-next-line
query.addListener(queryListener);
this.windowMediaQueryListener = () => {
// eslint-disable-next-line
query.removeListener(queryListener);
};
// set the initial state
this.sizesSubject.next(query.matches);
}
}
public isSmallScreen(): boolean {
return this.sizesSubject.value;
}
public sizes(): Observable<boolean> {
return this.sizesSubject.asObservable();
}
destroy(): void {
if (this.windowMediaQueryListener) {
this.windowMediaQueryListener();
this.windowMediaQueryListener = null;
}
}
}
@Component({
selector: "mdl-layout",
template: `
<div
class="mdl-layout__container"
[ngClass]="{ 'has-scrolling-header': mode === 'scroll' }"
>
<div
class="mdl-layout is-upgraded"
[ngClass]="{
'is-small-screen': isSmallScreen,
'mdl-layout--fixed-drawer': isFixedDrawer,
'mdl-layout--fixed-header': isFixedHeader,
'mdl-layout--fixed-tabs': 'tabs.toArray().length > 0'
}"
>
<ng-content select="mdl-layout-header"></ng-content>
<ng-content select="mdl-layout-drawer"></ng-content>
<div
*ngIf="drawers.length > 0 && isNoDrawer === false"
class="mdl-layout__drawer-button"
(click)="toggleDrawer()"
>
<mdl-icon></mdl-icon>
</div>
<ng-content select="mdl-layout-content"></ng-content>
<div
class="mdl-layout__obfuscator"
[ngClass]="{ 'is-visible': isDrawerVisible }"
(click)="toggleDrawer()"
(keydown)="obfuscatorKeyDown($event)"
></div>
</div>
</div>
`,
exportAs: "mdlLayout",
encapsulation: ViewEncapsulation.None,
})
export class MdlLayoutComponent
implements AfterContentInit, OnDestroy, OnChanges {
@ContentChild(MdlLayoutHeaderComponent)
header;
// will be set to undefined, if not a direct child or not present in 2.0.0 i
// n 2.0.1 it is now the grand child drawer again :(
@ContentChildren(MdlLayoutDrawerComponent, { descendants: false })
drawers: QueryList<MdlLayoutDrawerComponent>;
@ContentChild(MdlLayoutContentComponent, { static: true })
content;
// eslint-disable-next-line
@Input("mdl-layout-mode")
mode: string = STANDARD;
// eslint-disable-next-line
@Output("mdl-layout-tab-active-changed")
selectedTabEmitter = new EventEmitter();
// eslint-disable-next-line
@Output("mdl-layout-tab-mouseover")
mouseoverTabEmitter = new EventEmitter();
// eslint-disable-next-line
@Output("mdl-layout-tab-mouseout")
mouseoutTabEmitter = new EventEmitter();
// eslint-disable-next-line
@Output("open")
// eslint-disable-next-line @angular-eslint/no-output-on-prefix
onOpen = new EventEmitter<void>();
// eslint-disable-next-line
@Output("close")
// eslint-disable-next-line @angular-eslint/no-output-on-prefix
onClose = new EventEmitter<void>();
isDrawerVisible = false;
isSmallScreen = false;
private scrollListener: (
target?: "window" | "document" | "body" | unknown,
eventName?: string,
callback?: (event: Event) => boolean | void
) => void;
private isFixedDrawerIntern = false;
private isFixedHeaderIntern = false;
private isSeamedIntern = false;
private selectedIndexIntern = 0;
private isNoDrawerIntern = false;
private subscriptions: Subscription[] = [];
constructor(
private renderer: Renderer2,
private evm: EventManager,
private el: ElementRef,
private screenSizeService: MdlScreenSizeService,
private layoutMediatorService: MdlLayoutMediatorService
) {}
@Input("mdl-layout-fixed-drawer")
get isFixedDrawer(): boolean {
return this.isFixedDrawerIntern;
}
set isFixedDrawer(value: boolean) {
this.isFixedDrawerIntern = toBoolean(value);
}
@Input("mdl-layout-fixed-header")
get isFixedHeader(): boolean {
return this.isFixedHeaderIntern;
}
set isFixedHeader(value: boolean) {
this.isFixedHeaderIntern = toBoolean(value);
}
@Input("mdl-layout-header-seamed")
get isSeamed(): boolean {
return this.isSeamedIntern;
}
set isSeamed(value: boolean) {
this.isSeamedIntern = toBoolean(value);
}
@Input("mdl-layout-tab-active-index")
get selectedIndex(): number {
return this.selectedIndexIntern;
}
set selectedIndex(value: number) {
this.selectedIndexIntern = toNumber(value);
}
<|fim▁hole|> return this.isNoDrawerIntern;
}
set isNoDrawer(value: boolean) {
this.isNoDrawerIntern = toBoolean(value);
}
ngAfterContentInit(): void {
this.validateMode();
if (this.header && this.content && this.content.tabs) {
this.header.tabs = this.content.tabs;
this.updateSelectedTabIndex();
}
if (this.header && this.header.tabs) {
this.subscriptions.push(
this.layoutMediatorService
.onTabMouseOut()
.subscribe((tab: MdlLayoutTabPanelComponent) => {
this.onTabMouseout(tab);
})
);
this.subscriptions.push(
this.layoutMediatorService
.onTabMouseover()
.subscribe((tab: MdlLayoutTabPanelComponent) => {
this.onTabMouseover(tab);
})
);
this.subscriptions.push(
this.layoutMediatorService
.onTabSelected()
.subscribe((tab: MdlLayoutTabPanelComponent) => {
this.tabSelected(tab);
})
);
}
}
public ngOnChanges(changes: SimpleChanges): void {
if (changes.selectedIndex) {
this.updateSelectedTabIndex();
}
}
toggleDrawer(): void {
this.isDrawerVisible = !this.isDrawerVisible;
if (this.drawers.length > 0) {
this.setDrawerVisible(this.isDrawerVisible);
}
}
closeDrawer(): void {
this.isDrawerVisible = false;
if (this.drawers.length > 0) {
this.setDrawerVisible(false);
}
}
openDrawer(): void {
this.isDrawerVisible = true;
if (this.drawers.length > 0) {
this.setDrawerVisible(true);
}
}
obfuscatorKeyDown($event: KeyboardEvent): void {
if ($event.keyCode === ESCAPE) {
this.toggleDrawer();
}
}
ngOnDestroy(): void {
if (this.scrollListener) {
this.scrollListener();
this.scrollListener = null;
}
this.subscriptions.forEach((sub) => sub.unsubscribe());
}
closeDrawerOnSmallScreens(): void {
if (this.isSmallScreen && this.isDrawerVisible) {
this.closeDrawer();
}
}
openDrawerOnSmallScreens(): void {
if (this.isSmallScreen && !this.isDrawerVisible) {
this.openDrawer();
}
}
hasDrawer(): boolean {
return this.drawers.length > 0;
}
private tabSelected(tab: MdlLayoutTabPanelComponent) {
const index = this.header.tabs.toArray().indexOf(tab);
if (index !== this.selectedIndex) {
this.selectedIndex = index;
this.updateSelectedTabIndex();
this.selectedTabEmitter.emit({ index: this.selectedIndex });
}
}
private onTabMouseover(tab: MdlLayoutTabPanelComponent) {
const index = this.header.tabs.toArray().indexOf(tab);
this.mouseoverTabEmitter.emit({ index });
}
private onTabMouseout(tab: MdlLayoutTabPanelComponent) {
const index = this.header.tabs.toArray().indexOf(tab);
this.mouseoutTabEmitter.emit({ index });
}
private updateSelectedTabIndex() {
if (this.header && this.header.tabs) {
this.header.tabs.forEach((tab) => (tab.isActive = false));
if (
this.header.tabs.toArray().length > 0 &&
this.selectedIndex < this.header.tabs.toArray().length
) {
this.header.tabs.toArray()[this.selectedIndex].isActive = true;
}
}
}
private validateMode() {
if (this.mode === "") {
this.mode = STANDARD;
}
if ([STANDARD, WATERFALL, SCROLL].indexOf(this.mode) === -1) {
throw new MdLUnsupportedLayoutTypeError(this.mode);
}
if (this.header) {
// inform the header about the mode
this.header.mode = this.mode;
this.header.isSeamed = this.isSeamed;
}
if (this.content) {
this.scrollListener = this.renderer.listen(
this.content.el,
"scroll",
() => {
this.onScroll(this.content.el.scrollTop);
return true;
}
);
this.screenSizeService.sizes().subscribe((isSmall: boolean) => {
this.onQueryChange(isSmall);
});
}
}
private onScroll(scrollTop) {
if (this.mode !== WATERFALL) {
return;
}
if (this.header.isAnimating) {
return;
}
const headerVisible = !this.isSmallScreen || this.isFixedHeader;
if (scrollTop > 0 && !this.header.isCompact) {
this.header.isCompact = true;
if (headerVisible) {
this.header.isAnimating = true;
}
} else if (scrollTop <= 0 && this.header.isCompact) {
this.header.isCompact = false;
if (headerVisible) {
this.header.isAnimating = true;
}
}
}
private onQueryChange(isSmall: boolean) {
if (isSmall) {
this.isSmallScreen = true;
} else {
this.isSmallScreen = false;
this.closeDrawer();
}
}
private setDrawerVisible(visible: boolean) {
this.drawers.first.isDrawerVisible = visible;
if (this.drawers.first.isDrawerVisible) {
this.onOpen.emit();
} else {
this.onClose.emit();
}
}
}<|fim▁end|> | @Input("mdl-layout-no-drawer-button")
get isNoDrawer(): boolean { |
<|file_name|>custom.js<|end_file_name|><|fim▁begin|>'use strict';
//var async = require('async'),
// nconf = require('nconf'),
// user = require('../user'),
// groups = require('../groups'),
// topics = require('../topics'),
// posts = require('../posts'),
// notifications = require('../notifications'),
// messaging = require('../messaging'),
// plugins = require('../plugins'),
// utils = require('../../public/src/utils'),
// websockets = require('./index'),
// meta = require('../meta'),
var linkParser = require('../controllers/mind-map/linkParser_new-format'),
swaggerBuilder = require('../modeling/swaggerBuilder'),
swaggerBuildertr069 = require('../modeling/swaggerBuilder-Scope');
var SocketCustom = {};<|fim▁hole|> return callback(new Error('[[invalid-uid]]'));
}
linkParser.init(function(err) {
callback(null, '{"message": "Refreshed Link Parser"}');
});
};
SocketCustom.refreshSwagger = function(socket, data, callback) {
if (!socket.uid) {
return callback(new Error('[[invalid-uid]]'));
}
swaggerBuilder.init(function(err) {
callback(null, '{"message": "Refreshed Swagger File"}');
});
};
SocketCustom.refreshZoneSwagger = function(socket, data, callback) {
if (!socket.uid) {
return callback(new Error('[[invalid-uid]]'));
}
swaggerBuildertr069.init(function(err) {
callback(null, '{"message": "Refreshed Zone Swagger File"}');
});
};
/* Exports */
module.exports = SocketCustom;<|fim▁end|> |
SocketCustom.refreshLinkParser = function(socket, sets, callback) {
if (!socket.uid) { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|><|fim▁end|> | """Parse Archimate XML Exchange File Format into a MongoDB DB""" |
<|file_name|>goaliedash.server.routes.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
/**
* Module dependencies.
*/
var users = require('../../app/controllers/users'),
goaliedash = require('../../app/controllers/goaliedash');
module.exports = function(app) {
app.route('/goaliedash')
.get(users.requiresLogin, users.hasAuthorization);
};<|fim▁end|> | 'use strict'; |
<|file_name|>comment.js<|end_file_name|><|fim▁begin|>/**
* @license Copyright (c) 2003-2016, CKSource - Frederico Knabben. All rights reserved.
* For licensing, see LICENSE.md or http://ckeditor.com/license
*/
/**
* @fileOverview Defines the {@link CKEDITOR.dom.comment} class, which represents
* a DOM comment node.
*/
/**
* Represents a DOM comment node.
*
* var nativeNode = document.createComment( 'Example' );
* var comment = new CKEDITOR.dom.comment( nativeNode );
*
* var comment = new CKEDITOR.dom.comment( 'Example' );
*
* @class
* @extends CKEDITOR.dom.node
* @constructor Creates a comment class instance.
* @param {Object/String} comment A native DOM comment node or a string containing
* the text to use to create a new comment node.
* @param {CKEDITOR.dom.document} [ownerDocument] The document that will contain
* the node in case of new node creation. Defaults to the current document.
*/
CKEDITOR.dom.comment = function( comment, ownerDocument ) {
if ( typeof comment == 'string' )
comment = ( ownerDocument ? ownerDocument.$ : document ).createComment( comment );<|fim▁hole|>};
CKEDITOR.dom.comment.prototype = new CKEDITOR.dom.node();
CKEDITOR.tools.extend( CKEDITOR.dom.comment.prototype, {
/**
* The node type. This is a constant value set to {@link CKEDITOR#NODE_COMMENT}.
*
* @readonly
* @property {Number} [=CKEDITOR.NODE_COMMENT]
*/
type: CKEDITOR.NODE_COMMENT,
/**
* Gets the outer HTML of this comment.
*
* @returns {String} The HTML `<!-- comment value -->`.
*/
getOuterHtml: function() {
return '<!--' + this.$.nodeValue + '-->';
}
} );<|fim▁end|> |
CKEDITOR.dom.domObject.call( this, comment ); |
<|file_name|>OsiamUserService.java<|end_file_name|><|fim▁begin|>package org.osiam.client;
/*
* for licensing see the file license.txt.
*/
import static org.apache.http.HttpStatus.SC_FORBIDDEN;
import static org.apache.http.HttpStatus.SC_OK;
import static org.apache.http.HttpStatus.SC_UNAUTHORIZED;
import static org.apache.http.HttpStatus.SC_CONFLICT;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.entity.ContentType;
import org.apache.http.impl.client.DefaultHttpClient;
import org.osiam.client.exception.ConflictException;
import org.osiam.client.exception.ConnectionInitializationException;
import org.osiam.client.exception.ForbiddenException;
import org.osiam.client.exception.NoResultException;
import org.osiam.client.exception.UnauthorizedException;
import org.osiam.client.oauth.AccessToken;
import org.osiam.client.query.Query;
import org.osiam.client.query.QueryResult;
import org.osiam.client.update.UpdateUser;
import org.osiam.resources.scim.User;
/**
* The OsiamUserService provides all methods necessary to manipulate the User-entries registered in the
* given OSIAM installation. For the construction of an instance please use the included {@link OsiamUserService.Builder}
*/
public final class OsiamUserService extends AbstractOsiamService<User> { // NOSONAR - Builder constructs instances of this class
/**
* The private constructor for the OsiamUserService. Please use the {@link OsiamUserService.Builder}
* to construct one.
*
* @param userWebResource a valid WebResource to connect to a given OSIAM server
*/
private OsiamUserService(Builder builder) {
super(builder);
}
/**
* Retrieve a single User with the given id. If no user for the given id can be found a {@link NoResultException}
* is thrown.
*
* @param id the id of the wanted user
* @param accessToken the OSIAM access token from for the current session
* @return the user with the given id
* @throws org.osiam.client.exception.UnauthorizedException if the request could not be authorized.
* @throws org.osiam.client.exception.NoResultException if no user with the given id can be found
* @throws org.osiam.client.exception.ForbiddenException if the scope doesn't allow this request
* @throws org.osiam.client.exception.ConnectionInitializationException
* if the connection to the given OSIAM service could not be initialized
*/
public User getUser(String id, AccessToken accessToken) {
return getResource(id, accessToken);
}
/**
* Retrieve the User who holds the given access token.
* Not to be used for the grant Client-Credentials
* @param accessToken the OSIAM access token from for the current session
* @return the actual logged in user
* @throws UnauthorizedException if the request could not be authorized.
* @throws org.osiam.client.exception.ForbiddenException if the scope doesn't allow this request
* @throws ConnectionInitializationException
* if no connection to the given OSIAM services could be initialized
*/
public User getMeBasic(AccessToken accessToken) {
final User user;
if (accessToken == null) { // NOSONAR - false-positive from clover; if-expression is correct
throw new IllegalArgumentException("The given accessToken can't be null.");
}
try {
DefaultHttpClient httpclient = new DefaultHttpClient();
HttpGet realWebresource = createRealWebResource(accessToken);
realWebresource.setURI(new URI(getMeWebResource().getURI().toString()));
HttpResponse response = httpclient.execute(realWebresource);
int httpStatus = response.getStatusLine().getStatusCode();
if (httpStatus != SC_OK) { // NOSONAR - false-positive from clover; if-expression is correct
String errorMessage;
switch (httpStatus) {
case SC_UNAUTHORIZED:
errorMessage = getErrorMessage(response, "You are not authorized to access OSIAM. Please make sure your access token is valid");
throw new UnauthorizedException(errorMessage);
case SC_FORBIDDEN:
errorMessage = "Insufficient scope (" + accessToken.getScope() + ") to retrieve the actual User.";
throw new ForbiddenException(errorMessage);
case SC_CONFLICT:
errorMessage = getErrorMessage(response, "Unable to retrieve the actual User.");
throw new ConflictException(errorMessage);
default:
errorMessage = getErrorMessage(response, String.format("Unable to setup connection (HTTP Status Code: %d)", httpStatus));
throw new ConnectionInitializationException(errorMessage);
}
}
InputStream content = response.getEntity().getContent();
user = mapSingleResourceResponse(content);
return user;
} catch (IOException | URISyntaxException e) {
throw new ConnectionInitializationException("Unable to setup connection", e);
}
}
/**
* Retrieve the User who holds the given access token.
* Not to be used for the grant Client-Credentials
* @param accessToken the OSIAM access token from for the current session
* @return the actual logged in user
* @throws UnauthorizedException if the request could not be authorized.
* @throws org.osiam.client.exception.ForbiddenException if the scope doesn't allow this request
* @throws ConnectionInitializationException
* if no connection to the given OSIAM services could be initialized
*/
public User getMe(AccessToken accessToken) {
final User user;
if (accessToken == null) { // NOSONAR - false-positive from clover; if-expression is correct
throw new IllegalArgumentException("The given accessToken can't be null.");
}
try {
DefaultHttpClient httpclient = new DefaultHttpClient();
HttpGet realWebresource = createRealWebResource(accessToken);
realWebresource.setURI(new URI(getUri() + "/me"));
HttpResponse response = httpclient.execute(realWebresource);
int httpStatus = response.getStatusLine().getStatusCode();
if (httpStatus != SC_OK) { // NOSONAR - false-positive from clover; if-expression is correct
String errorMessage;
switch (httpStatus) {
case SC_UNAUTHORIZED:
errorMessage = getErrorMessage(response, "You are not authorized to access OSIAM. Please make sure your access token is valid");
throw new UnauthorizedException(errorMessage);
case SC_FORBIDDEN:
errorMessage = "Insufficient scope (" + accessToken.getScope() + ") to retrieve the actual User.";
throw new ForbiddenException(errorMessage);
case SC_CONFLICT:
errorMessage = getErrorMessage(response, "Unable to retrieve the actual User.");
throw new ConflictException(errorMessage);
default:<|fim▁hole|> }
}
InputStream content = response.getEntity().getContent();
user = mapSingleResourceResponse(content);
return user;
} catch (IOException | URISyntaxException e) {
throw new ConnectionInitializationException("Unable to setup connection", e);
}
}
protected HttpGet getMeWebResource() {
HttpGet webResource;
try {
webResource = new HttpGet(new URI(getEndpoint() + "/me"));
webResource.addHeader("Accept", ContentType.APPLICATION_JSON.getMimeType());
} catch (URISyntaxException e) {
throw new ConnectionInitializationException("Unable to setup connection " + getEndpoint() +
"is not a valid URI.", e);
}
return webResource;
}
/**
* Retrieve a list of the of all {@link User} resources saved in the OSIAM service.
* If you need to have all User but the number is very big, this method can be slow.
* In this case you can also use Query.Builder with no filter to split the number of User returned
*
* @param accessToken
* @return a list of all Users
* @throws org.osiam.client.exception.UnauthorizedException if the request could not be authorized.
* @throws org.osiam.client.exception.ForbiddenException if the scope doesn't allow this request
* @throws org.osiam.client.exception.ConnectionInitializationException
* if the connection to the given OSIAM service could not be initialized
*/
public List<User> getAllUsers(AccessToken accessToken) {
return super.getAllResources(accessToken);
}
/**
* Search for the existing Users by a given search string. For more detailed information about the possible
* logical operators and usable fields please have a look into the wiki.<p>
* <b>Note:</b> The query string should be URL encoded!
*
* @param queryString The URL encoded string with the query that should be passed to the OSIAM service
* @param accessToken the OSIAM access token from for the current session
* @return a QueryResult Containing a list of all found Users
* @throws org.osiam.client.exception.UnauthorizedException if the request could not be authorized.
* @throws org.osiam.client.exception.ForbiddenException if the scope doesn't allow this request
* @throws org.osiam.client.exception.ConnectionInitializationException
* if the connection to the given OSIAM service could not be initialized
* @see <a href="https://github.com/osiam/connector4java/wiki/Working-with-user#search-for-user">https://github.com/osiam/connector4java/wiki/Working-with-user#search-for-user</a>
*/
public QueryResult<User> searchUsers(String queryString, AccessToken accessToken) {
return super.searchResources(queryString, accessToken);
}
/**
* Search for existing Users by the given {@link Query}.
*
* @param query containing the query to execute.
* @param accessToken the OSIAM access token from for the current session
* @return a QueryResult Containing a list of all found Users
* @throws org.osiam.client.exception.UnauthorizedException if the request could not be authorized.
* @throws org.osiam.client.exception.ForbiddenException if the scope doesn't allow this request
* @throws org.osiam.client.exception.ConnectionInitializationException
* if the connection to the given OSIAM service could not be initialized
*/
public QueryResult<User> searchUsers(Query query, AccessToken accessToken) {
return super.searchResources(query, accessToken);
}
/**
* delete the given {@link User} at the OSIAM DB.
* @param id id of the User to be delete
* @param accessToken the OSIAM access token from for the current session
* @throws org.osiam.client.exception.UnauthorizedException if the request could not be authorized.
* @throws org.osiam.client.exception.NoResultException if no user with the given id can be found
* @throws org.osiam.client.exception.ConflictException if the User could not be deleted
* @throws org.osiam.client.exception.ForbiddenException if the scope doesn't allow this request
* @throws org.osiam.client.exception.ConnectionInitializationException
* if the connection to the given OSIAM service could not be initialized
*/
public void deleteUser(String id, AccessToken accessToken) {
deleteResource(id, accessToken);
}
/**
* saves the given {@link User} to the OSIAM DB.
* @param user user to be saved
* @param accessToken the OSIAM access token from for the current session
* @return the same user Object like the given but with filled metadata and a new valid id
* @throws org.osiam.client.exception.UnauthorizedException if the request could not be authorized.
* @throws org.osiam.client.exception.ConflictException if the User could not be created
* @throws org.osiam.client.exception.ForbiddenException if the scope doesn't allow this request
* @throws org.osiam.client.exception.ConnectionInitializationException
* if the connection to the given OSIAM service could not be initialized
*/
public User createUser(User user, AccessToken accessToken) {
return createResource(user, accessToken);
}
/**
* update the user of the given id with the values given in the User Object.
* For more detailed information how to set new field, update Fields or to delete Fields please look in the wiki
* @param id if of the User to be updated
* @param updateUser all Fields that need to be updated
* @param accessToken the OSIAM access token from for the current session
* @return the updated User Object with all new Fields
* @see <a href="https://github.com/osiam/connector4java/wiki/Working-with-user">https://github.com/osiam/connector4java/wiki/Working-with-user</a>
* @throws org.osiam.client.exception.UnauthorizedException if the request could not be authorized.
* @throws org.osiam.client.exception.ConflictException if the User could not be updated
* @throws org.osiam.client.exception.NotFoundException if no group with the given id can be found
* @throws org.osiam.client.exception.ForbiddenException if the scope doesn't allow this request
* @throws org.osiam.client.exception.ConnectionInitializationException
* if the connection to the given OSIAM service could not be initialized
*/
public User updateUser(String id, UpdateUser updateUser , AccessToken accessToken){
if (updateUser == null) { // NOSONAR - false-positive from clover; if-expression is correct
throw new IllegalArgumentException("The given updateUser can't be null.");
}
return updateResource(id, updateUser.getScimConformUpdateUser(), accessToken);
}
/**
* The Builder class is used to construct instances of the {@link OsiamUserService}
*/
public static class Builder extends AbstractOsiamService.Builder<User> {
/**
* Set up the Builder for the construction of an {@link OsiamUserService} instance for the OSIAM service at
* the given endpoint
*
* @param endpoint The URL at which the OSIAM server lives.
*/
public Builder(String endpoint) {
super(endpoint);
}
/**
* constructs an OsiamUserService with the given values
*
* @return a valid OsiamUserService
*/
public OsiamUserService build() {
return new OsiamUserService(this);
}
}
}<|fim▁end|> | errorMessage = getErrorMessage(response, String.format("Unable to setup connection (HTTP Status Code: %d)", httpStatus));
throw new ConnectionInitializationException(errorMessage); |
<|file_name|>precip_ingest.py<|end_file_name|><|fim▁begin|>"""Ingest Stage IV Hourly Files.
1. Copies to hourly stage IV netCDF files
2. Copies hourly stage IV netCDF to hourly IEMRE
"""
import os
import datetime
import sys
import numpy as np
from scipy.interpolate import NearestNDInterpolator
import pygrib
from pyiem import iemre
from pyiem.util import utc, ncopen, logger
LOG = logger()
def get_p01m_status(valid):
"""Figure out what our current status is of this hour."""
nc = ncopen(
("/mesonet/data/stage4/%s_stage4_hourly.nc") % (valid.year,),
timeout=300,
)
tidx = iemre.hourly_offset(valid)
# 2 prism_adjust_stage4 ran
# 1 copied hourly data in
# 0 nothing happened
p01m_status = nc.variables["p01m_status"][tidx]
nc.close()
LOG.debug("p01m_status is %s for valid %s", p01m_status, valid)
return p01m_status
def ingest_hourly_grib(valid):
"""Copy the hourly grib data into the netcdf storage.
Returns:
int value of the new p01m_status
"""
tidx = iemre.hourly_offset(valid)
fn = valid.strftime(
"/mesonet/ARCHIVE/data/%Y/%m/%d/stage4/ST4.%Y%m%d%H.01h.grib"
)
if not os.path.isfile(fn):
LOG.info("stage4_ingest: missing file %s", fn)
return 0
gribs = pygrib.open(fn)
grb = gribs[1]
val = grb.values
# values over 10 inches are bad
val = np.where(val > 250.0, 0, val)
ncfn = f"/mesonet/data/stage4/{valid.year}_stage4_hourly.nc"
with ncopen(ncfn, "a", timeout=300) as nc:
p01m = nc.variables["p01m"]
# account for legacy grid prior to 2002
if val.shape == (880, 1160):
p01m[tidx, 1:, :] = val[:, 39:]
else:
p01m[tidx, :, :] = val
nc.variables["p01m_status"][tidx] = 1
LOG.debug(
"write p01m to stage4 netcdf min: %.2f avg: %.2f max: %.2f",
np.min(val),
np.mean(val),
np.max(val),
)
return 1
def copy_to_iemre(valid):
"""verbatim copy over to IEMRE."""
tidx = iemre.hourly_offset(valid)
ncfn = f"/mesonet/data/stage4/{valid.year}_stage4_hourly.nc"
with ncopen(ncfn, "a", timeout=300) as nc:
lats = nc.variables["lat"][:]
lons = nc.variables["lon"][:]
val = nc.variables["p01m"][tidx]
# Our data is 4km, iemre is 0.125deg, so we stride some to cut down on mem
stride = slice(None, None, 3)
lats = np.ravel(lats[stride, stride])
lons = np.ravel(lons[stride, stride])
vals = np.ravel(val[stride, stride])
nn = NearestNDInterpolator((lons, lats), vals)
xi, yi = np.meshgrid(iemre.XAXIS, iemre.YAXIS)
res = nn(xi, yi)
# Lets clip bad data
# 10 inches per hour is bad data
res = np.where(np.logical_or(res < 0, res > 250), 0.0, res)
# Open up our RE file
nc = ncopen(iemre.get_hourly_ncname(valid.year), "a", timeout=300)
nc.variables["p01m"][tidx, :, :] = res
LOG.debug(
"wrote data to hourly IEMRE min: %.2f avg: %.2f max: %.2f",
np.min(res),
np.mean(res),
np.max(res),
)
nc.close()
def workflow(valid):
"""Our stage IV workflow."""
# Figure out what the current status is
p01m_status = get_p01m_status(valid)
if np.ma.is_masked(p01m_status) or p01m_status < 2:
# merge in the raw hourly data
ingest_hourly_grib(valid)
copy_to_iemre(valid)
<|fim▁hole|> if len(argv) == 5:
ts = utc(int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4]))
workflow(ts)
return
# Otherwise we are running for an explicit 12z to 12z period, copy only
ets = utc(int(argv[1]), int(argv[2]), int(argv[3]), 12)
now = ets - datetime.timedelta(hours=23)
while now <= ets:
copy_to_iemre(now)
now += datetime.timedelta(hours=1)
if __name__ == "__main__":
main(sys.argv)<|fim▁end|> | def main(argv):
"""Go Main""" |
<|file_name|>quickcheck.rs<|end_file_name|><|fim▁begin|>#![cfg(feature="quickcheck")]
extern crate quickcheck;
extern crate rand;
extern crate petgraph;
use rand::Rng;
use petgraph::{Graph, GraphMap, Undirected, Directed, EdgeType, Incoming, Outgoing};
use petgraph::algo::{
min_spanning_tree,
is_cyclic_undirected,
is_isomorphic,
is_isomorphic_matching,
};
use petgraph::graph::{IndexType, node_index, edge_index};
#[cfg(feature = "stable_graph")]
use petgraph::graph::stable::StableGraph;
fn prop(g: Graph<(), u32>) -> bool {
// filter out isolated nodes
let no_singles = g.filter_map(
|nx, w| g.neighbors_undirected(nx).next().map(|_| w),
|_, w| Some(w));
for i in no_singles.node_indices() {
assert!(no_singles.neighbors_undirected(i).count() > 0);
}
assert_eq!(no_singles.edge_count(), g.edge_count());
let mst = min_spanning_tree(&no_singles);
assert!(!is_cyclic_undirected(&mst));
true
}
fn prop_undir(g: Graph<(), u32, Undirected>) -> bool {
// filter out isolated nodes
let no_singles = g.filter_map(
|nx, w| g.neighbors_undirected(nx).next().map(|_| w),
|_, w| Some(w));
for i in no_singles.node_indices() {
assert!(no_singles.neighbors_undirected(i).count() > 0);
}
assert_eq!(no_singles.edge_count(), g.edge_count());
let mst = min_spanning_tree(&no_singles);
assert!(!is_cyclic_undirected(&mst));
true
}
#[test]
fn arbitrary() {
quickcheck::quickcheck(prop as fn(_) -> bool);
quickcheck::quickcheck(prop_undir as fn(_) -> bool);
}
#[test]
fn reverse_undirected() {
fn prop<Ty: EdgeType>(g: Graph<(), (), Ty>) -> bool {
if g.edge_count() > 30 {
return true; // iso too slow
}
let mut h = g.clone();
h.reverse();
is_isomorphic(&g, &h)
}
quickcheck::quickcheck(prop as fn(Graph<_, _, Undirected>) -> bool);
}
fn assert_graph_consistent<N, E, Ty, Ix>(g: &Graph<N, E, Ty, Ix>)
where Ty: EdgeType,
Ix: IndexType,
{
assert_eq!(g.node_count(), g.node_indices().count());
assert_eq!(g.edge_count(), g.edge_indices().count());
for edge in g.raw_edges() {
assert!(g.find_edge(edge.source(), edge.target()).is_some(),
"Edge not in graph! {:?} to {:?}", edge.source(), edge.target());
}
}
#[test]
fn reverse_directed() {
fn prop<Ty: EdgeType>(mut g: Graph<(), (), Ty>) -> bool {
let node_outdegrees = g.node_indices()
.map(|i| g.neighbors_directed(i, Outgoing).count())
.collect::<Vec<_>>();
let node_indegrees = g.node_indices()
.map(|i| g.neighbors_directed(i, Incoming).count())
.collect::<Vec<_>>();
g.reverse();
let new_outdegrees = g.node_indices()
.map(|i| g.neighbors_directed(i, Outgoing).count())
.collect::<Vec<_>>();
let new_indegrees = g.node_indices()
.map(|i| g.neighbors_directed(i, Incoming).count())
.collect::<Vec<_>>();
assert_eq!(node_outdegrees, new_indegrees);
assert_eq!(node_indegrees, new_outdegrees);
assert_graph_consistent(&g);
true
}
quickcheck::quickcheck(prop as fn(Graph<_, _, Directed>) -> bool);
}
#[test]
fn retain_nodes() {
fn prop<Ty: EdgeType>(mut g: Graph<i32, i32, Ty>) -> bool {
// Remove all negative nodes, these should be randomly spread
let og = g.clone();
let nodes = g.node_count();
let num_negs = g.raw_nodes().iter().filter(|n| n.weight < 0).count();
let mut removed = 0;
g.retain_nodes(|g, i| {
let keep = g[i] >= 0;
if !keep {
removed += 1;
}
keep
});
let num_negs_post = g.raw_nodes().iter().filter(|n| n.weight < 0).count();
let num_pos_post = g.raw_nodes().iter().filter(|n| n.weight >= 0).count();
assert_eq!(num_negs_post, 0);
assert_eq!(removed, num_negs);
assert_eq!(num_negs + g.node_count(), nodes);
assert_eq!(num_pos_post, g.node_count());
// check against filter_map
let filtered = og.filter_map(|_, w| if *w >= 0 { Some(*w) } else { None },
|_, w| Some(*w));
assert_eq!(g.node_count(), filtered.node_count());
/*
println!("Iso of graph with nodes={}, edges={}",
g.node_count(), g.edge_count());
*/
assert!(is_isomorphic_matching(&filtered, &g, PartialEq::eq, PartialEq::eq));
true
}
quickcheck::quickcheck(prop as fn(Graph<_, _, Directed>) -> bool);
quickcheck::quickcheck(prop as fn(Graph<_, _, Undirected>) -> bool);
}
#[test]
fn retain_edges() {
fn prop<Ty: EdgeType>(mut g: Graph<(), i32, Ty>) -> bool {
// Remove all negative edges, these should be randomly spread
let og = g.clone();
let edges = g.edge_count();
let num_negs = g.raw_edges().iter().filter(|n| n.weight < 0).count();
let mut removed = 0;
g.retain_edges(|g, i| {
let keep = g[i] >= 0;
if !keep {
removed += 1;
}
keep
});
let num_negs_post = g.raw_edges().iter().filter(|n| n.weight < 0).count();
let num_pos_post = g.raw_edges().iter().filter(|n| n.weight >= 0).count();
assert_eq!(num_negs_post, 0);
assert_eq!(removed, num_negs);
assert_eq!(num_negs + g.edge_count(), edges);
assert_eq!(num_pos_post, g.edge_count());
if og.edge_count() < 30 {
// check against filter_map
let filtered = og.filter_map(
|_, w| Some(*w),
|_, w| if *w >= 0 { Some(*w) } else { None });
assert_eq!(g.node_count(), filtered.node_count());
assert!(is_isomorphic(&filtered, &g));
}
true
}
quickcheck::quickcheck(prop as fn(Graph<_, _, Directed>) -> bool);
quickcheck::quickcheck(prop as fn(Graph<_, _, Undirected>) -> bool);
}
#[test]
fn isomorphism_1() {
// using small weights so that duplicates are likely
fn prop<Ty: EdgeType>(g: Graph<i8, i8, Ty>) -> bool {
let mut rng = rand::thread_rng();
// several trials of different isomorphisms of the same graph
// mapping of node indices
let mut map = g.node_indices().collect::<Vec<_>>();
let mut ng = Graph::<_, _, Ty>::with_capacity(g.node_count(), g.edge_count());
for _ in 0..1 {
rng.shuffle(&mut map);
ng.clear();
for _ in g.node_indices() {
ng.add_node(0);
}
// Assign node weights
for i in g.node_indices() {
ng[map[i.index()]] = g[i];
}
// Add edges
for i in g.edge_indices() {
let (s, t) = g.edge_endpoints(i).unwrap();
ng.add_edge(map[s.index()],
map[t.index()],
g[i]);
}
if g.node_count() < 20 && g.edge_count() < 50 {
assert!(is_isomorphic(&g, &ng));
}
assert!(is_isomorphic_matching(&g, &ng, PartialEq::eq, PartialEq::eq));
}
true
}
quickcheck::quickcheck(prop::<Undirected> as fn(_) -> bool);
quickcheck::quickcheck(prop::<Directed> as fn(_) -> bool);
}
#[test]
fn isomorphism_modify() {
// using small weights so that duplicates are likely
fn prop<Ty: EdgeType>(g: Graph<i16, i8, Ty>, node: u8, edge: u8) -> bool {
let mut ng = g.clone();
let i = node_index(node as usize);
let j = edge_index(edge as usize);
if i.index() < g.node_count() {
ng[i] = (g[i] == 0) as i16;
}
if j.index() < g.edge_count() {
ng[j] = (g[j] == 0) as i8;
}
if i.index() < g.node_count() || j.index() < g.edge_count() {
assert!(!is_isomorphic_matching(&g, &ng, PartialEq::eq, PartialEq::eq));
} else {
assert!(is_isomorphic_matching(&g, &ng, PartialEq::eq, PartialEq::eq));
}
true
}
quickcheck::quickcheck(prop::<Undirected> as fn(_, _, _) -> bool);
quickcheck::quickcheck(prop::<Directed> as fn(_, _, _) -> bool);
}
#[test]
fn graph_remove_edge() {
fn prop<Ty: EdgeType>(mut g: Graph<(), (), Ty>, a: u8, b: u8) -> bool {
let a = node_index(a as usize);
let b = node_index(b as usize);
let edge = g.find_edge(a, b);
if !g.is_directed() {
assert_eq!(edge.is_some(), g.find_edge(b, a).is_some());
}
if let Some(ex) = edge {
assert!(g.remove_edge(ex).is_some());
}
assert_graph_consistent(&g);
assert!(g.find_edge(a, b).is_none());
assert!(g.neighbors(a).find(|x| *x == b).is_none());
if !g.is_directed() {
assert!(g.neighbors(b).find(|x| *x == a).is_none());
}
true
}
quickcheck::quickcheck(prop as fn(Graph<_, _, Undirected>, _, _) -> bool);
quickcheck::quickcheck(prop as fn(Graph<_, _, Directed>, _, _) -> bool);
}
#[cfg(feature = "stable_graph")]
#[test]
fn stable_graph_remove_edge() {
fn prop<Ty: EdgeType>(mut g: StableGraph<(), (), Ty>, a: u8, b: u8) -> bool {
let a = node_index(a as usize);
let b = node_index(b as usize);
let edge = g.find_edge(a, b);
if !g.is_directed() {
assert_eq!(edge.is_some(), g.find_edge(b, a).is_some());
}
if let Some(ex) = edge {
assert!(g.remove_edge(ex).is_some());
}
//assert_graph_consistent(&g);
assert!(g.find_edge(a, b).is_none());
assert!(g.neighbors(a).find(|x| *x == b).is_none());
if !g.is_directed() {
assert!(g.find_edge(b, a).is_none());
assert!(g.neighbors(b).find(|x| *x == a).is_none());
}
true
}
quickcheck::quickcheck(prop as fn(StableGraph<_, _, Undirected>, _, _) -> bool);
quickcheck::quickcheck(prop as fn(StableGraph<_, _, Directed>, _, _) -> bool);
}
#[cfg(feature = "stable_graph")]
#[test]
fn stable_graph_add_remove_edges() {
fn prop<Ty: EdgeType>(mut g: StableGraph<(), (), Ty>, edges: Vec<(u8, u8)>) -> bool {
for &(a, b) in &edges {
let a = node_index(a as usize);
let b = node_index(b as usize);
let edge = g.find_edge(a, b);
if edge.is_none() && g.contains_node(a) && g.contains_node(b) {
let _index = g.add_edge(a, b, ());
continue;
}
if !g.is_directed() {
assert_eq!(edge.is_some(), g.find_edge(b, a).is_some());
}
if let Some(ex) = edge {
assert!(g.remove_edge(ex).is_some());
}
//assert_graph_consistent(&g);
assert!(g.find_edge(a, b).is_none(), "failed to remove edge {:?} from graph {:?}", (a, b), g);
assert!(g.neighbors(a).find(|x| *x == b).is_none());
if !g.is_directed() {
assert!(g.find_edge(b, a).is_none());
assert!(g.neighbors(b).find(|x| *x == a).is_none());
}
}
true
}
quickcheck::quickcheck(prop as fn(StableGraph<_, _, Undirected>, _) -> bool);
quickcheck::quickcheck(prop as fn(StableGraph<_, _, Directed>, _) -> bool);
}<|fim▁hole|> let contains = g.contains_edge(a, b);
assert_eq!(contains, g.contains_edge(b, a));
assert_eq!(g.remove_edge(a, b).is_some(), contains);
assert!(!g.contains_edge(a, b) &&
g.neighbors(a).find(|x| *x == b).is_none() &&
g.neighbors(b).find(|x| *x == a).is_none());
assert!(g.remove_edge(a, b).is_none());
true
}
quickcheck::quickcheck(prop as fn(_, _, _) -> bool);
}
#[test]
fn graphmap_add_remove() {
fn prop(mut g: GraphMap<i8, ()>, a: i8, b: i8) -> bool {
assert_eq!(g.contains_edge(a, b), g.add_edge(a, b, ()).is_some());
g.remove_edge(a, b);
!g.contains_edge(a, b) &&
g.neighbors(a).find(|x| *x == b).is_none() &&
g.neighbors(b).find(|x| *x == a).is_none()
}
quickcheck::quickcheck(prop as fn(_, _, _) -> bool);
}<|fim▁end|> |
#[test]
fn graphmap_remove() {
fn prop(mut g: GraphMap<i8, ()>, a: i8, b: i8) -> bool { |
<|file_name|>blockdev.rs<|end_file_name|><|fim▁begin|>// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use std::cell::RefCell;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use chrono::{DateTime, TimeZone, Utc};
use uuid::Uuid;
use devicemapper::{Bytes, Sectors, IEC};
use super::super::engine::BlockDev;
use super::super::types::{BlockDevState, MaybeDbusPath};
use super::randomization::Randomizer;
#[derive(Debug)]
/// A simulated device.
pub struct SimDev {
devnode: PathBuf,
rdm: Rc<RefCell<Randomizer>>,
user_info: Option<String>,
hardware_info: Option<String>,
initialization_time: u64,
dbus_path: MaybeDbusPath,
}
impl BlockDev for SimDev {
fn devnode(&self) -> PathBuf {
self.devnode.clone()
}
fn user_info(&self) -> Option<&str> {
self.user_info.as_ref().map(|x| &**x)
}
fn hardware_info(&self) -> Option<&str> {
self.hardware_info.as_ref().map(|x| &**x)
}
fn initialization_time(&self) -> DateTime<Utc> {<|fim▁hole|>
fn size(&self) -> Sectors {
Bytes(IEC::Gi).sectors()
}
fn state(&self) -> BlockDevState {
BlockDevState::InUse
}
fn set_dbus_path(&mut self, path: MaybeDbusPath) {
self.dbus_path = path
}
fn get_dbus_path(&self) -> &MaybeDbusPath {
&self.dbus_path
}
}
impl SimDev {
/// Generates a new device from any devnode.
pub fn new(rdm: Rc<RefCell<Randomizer>>, devnode: &Path) -> (Uuid, SimDev) {
(
Uuid::new_v4(),
SimDev {
devnode: devnode.to_owned(),
rdm,
user_info: None,
hardware_info: None,
initialization_time: Utc::now().timestamp() as u64,
dbus_path: MaybeDbusPath(None),
},
)
}
/// Set the user info on this blockdev.
/// The user_info may be None, which unsets user info.
/// Returns true if the user info was changed, otherwise false.
pub fn set_user_info(&mut self, user_info: Option<&str>) -> bool {
set_blockdev_user_info!(self; user_info)
}
}<|fim▁end|> | Utc.timestamp(self.initialization_time as i64, 0)
} |
<|file_name|>struct_allocated_resources_in_describe_elasticity_assurances.go<|end_file_name|><|fim▁begin|>package ecs
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
// AllocatedResourcesInDescribeElasticityAssurances is a nested struct in ecs response<|fim▁hole|><|fim▁end|> | type AllocatedResourcesInDescribeElasticityAssurances struct {
AllocatedResource []AllocatedResource `json:"AllocatedResource" xml:"AllocatedResource"`
} |
<|file_name|>worker.js<|end_file_name|><|fim▁begin|>var AWS = require('aws-sdk');
var Policy = require("./s3post").Policy;
var helpers = require("./helpers");
var POLICY_FILE = "policy.json";
var schedule = require('node-schedule');
var Worker = function(sqsCommnad, s3Object, simpleData){
var queue = sqsCommnad;
var s3 = s3Object;
var simpleDataAuth = simpleData;
var policyData = helpers.readJSONFile(POLICY_FILE);
var policy = new Policy(policyData);
var bucket_name = policy.getConditionValueByKey("bucket");
Worker.prototype.job = function(){
var run = schedule.scheduleJob('*/4 * * * * *',
function(){
queue.recv(function(err, data){
if (err) {
console.log(err); <|fim▁hole|> }
console.log({Body : data.Body, MD5OfBody : data.MD5OfBody});
params = {
Bucket: bucket_name,
Key: data.Body
}
s3.getObject(params, function(err, data) {
if (err) {
console.log(err, err.stack);
}
else {
var request = require('request');
var mime = require('mime');
var gm = require('gm').subClass({ imageMagick: true });
var src = 'http://s3-us-west-2.amazonaws.com/'+params.Bucket+'/'+params.Key;
gm(request(src, params.Key))
.rotate('black', 15)
.stream(function(err, stdout, stderr) {
var buf = new Buffer('');
stdout.on('data', function(res) {
buf = Buffer.concat([buf, res]);
});
stdout.on('end', function(data) {
var atr = {
Bucket: params.Bucket,
Key: params.Key,
Body: buf,
ACL: 'public-read',
Metadata: {
"username" : "Szymon Glowacki",
"ip" : "192.168.1.10"
}
};
s3.putObject(atr, function(err, res) {
console.log("done");
});
});
});
}
});
});
});
}
}
module.exports = Worker;<|fim▁end|> | return; |
<|file_name|>resource_fastly_service_v1_headers_test.go<|end_file_name|><|fim▁begin|>package fastly
import (
"fmt"
"reflect"
"sort"
"testing"
"github.com/hashicorp/terraform/helper/acctest"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
gofastly "github.com/sethvargo/go-fastly"
)
func TestFastlyServiceV1_BuildHeaders(t *testing.T) {
cases := []struct {
remote *gofastly.CreateHeaderInput
local map[string]interface{}
}{
{
remote: &gofastly.CreateHeaderInput{
Name: "someheadder",
Action: gofastly.HeaderActionDelete,
IgnoreIfSet: gofastly.CBool(true),
Type: gofastly.HeaderTypeCache,
Destination: "http.aws-id",
Priority: uint(100),
},
local: map[string]interface{}{
"name": "someheadder",
"action": "delete",
"ignore_if_set": true,
"destination": "http.aws-id",
"priority": 100,
"source": "",
"regex": "",
"substitution": "",
"request_condition": "",
"cache_condition": "",
"response_condition": "",
"type": "cache",
},
},
{
remote: &gofastly.CreateHeaderInput{
Name: "someheadder",
Action: gofastly.HeaderActionSet,
IgnoreIfSet: gofastly.CBool(false),
Type: gofastly.HeaderTypeCache,
Destination: "http.aws-id",
Priority: uint(100),
Source: "http.server-name",
},
local: map[string]interface{}{
"name": "someheadder",
"action": "set",
"ignore_if_set": false,
"destination": "http.aws-id",
"priority": 100,
"source": "http.server-name",
"regex": "",
"substitution": "",
"request_condition": "",
"cache_condition": "",
"response_condition": "",
"type": "cache",
},
},
}
for _, c := range cases {
out, _ := buildHeader(c.local)
if !reflect.DeepEqual(out, c.remote) {
t.Fatalf("Error matching:\nexpected: %#v\ngot: %#v", c.remote, out)
}
}
}
func TestAccFastlyServiceV1_headers_basic(t *testing.T) {
var service gofastly.ServiceDetail
name := fmt.Sprintf("tf-test-%s", acctest.RandString(10))
domainName1 := fmt.Sprintf("%s.notadomain.com", acctest.RandString(10))
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckServiceV1Destroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: testAccServiceV1HeadersConfig(name, domainName1),
Check: resource.ComposeTestCheckFunc(
testAccCheckServiceV1Exists("fastly_service_v1.foo", &service),
testAccCheckFastlyServiceV1HeaderAttributes(&service, name, []string{"http.x-amz-request-id", "http.Server"}, nil),
resource.TestCheckResourceAttr(
"fastly_service_v1.foo", "name", name),
resource.TestCheckResourceAttr(
"fastly_service_v1.foo", "header.#", "2"),
),
},
resource.TestStep{
Config: testAccServiceV1HeadersConfig_update(name, domainName1),
Check: resource.ComposeTestCheckFunc(
testAccCheckServiceV1Exists("fastly_service_v1.foo", &service),
testAccCheckFastlyServiceV1HeaderAttributes(&service, name, []string{"http.x-amz-request-id", "http.Server"}, []string{"http.server-name"}),
resource.TestCheckResourceAttr(
"fastly_service_v1.foo", "name", name),
resource.TestCheckResourceAttr(
"fastly_service_v1.foo", "header.#", "3"),
resource.TestCheckResourceAttr(
"fastly_service_v1.foo", "header.1147514417.source", "server.identity"),
),
},
},
})
}
func testAccCheckFastlyServiceV1HeaderAttributes(service *gofastly.ServiceDetail, name string, headersDeleted, headersAdded []string) resource.TestCheckFunc {
return func(s *terraform.State) error {
if service.Name != name {
return fmt.Errorf("Bad name, expected (%s), got (%s)", name, service.Name)
}
conn := testAccProvider.Meta().(*FastlyClient).conn
headersList, err := conn.ListHeaders(&gofastly.ListHeadersInput{
Service: service.ID,
Version: service.ActiveVersion.Number,
})
if err != nil {
return fmt.Errorf("[ERR] Error looking up Headers for (%s), version (%s): %s", service.Name, service.ActiveVersion.Number, err)
}
var deleted []string
var added []string
for _, h := range headersList {
if h.Action == gofastly.HeaderActionDelete {
deleted = append(deleted, h.Destination)
}
if h.Action == gofastly.HeaderActionSet {
added = append(added, h.Destination)
}
}
sort.Strings(headersAdded)
sort.Strings(headersDeleted)
sort.Strings(deleted)
sort.Strings(added)
if !reflect.DeepEqual(headersDeleted, deleted) {
return fmt.Errorf("Deleted Headers did not match.\n\tExpected: (%#v)\n\tGot: (%#v)", headersDeleted, deleted)
}
if !reflect.DeepEqual(headersAdded, added) {
return fmt.Errorf("Added Headers did not match.\n\tExpected: (%#v)\n\tGot: (%#v)", headersAdded, added)
}
return nil
}
}
func testAccServiceV1HeadersConfig(name, domain string) string {
return fmt.Sprintf(`
resource "fastly_service_v1" "foo" {
name = "%s"
domain {
name = "%s"
comment = "tf-testing-domain"
}
backend {
address = "aws.amazon.com"
name = "amazon docs"
}
header {
destination = "http.x-amz-request-id"
type = "cache"
action = "delete"
name = "remove x-amz-request-id"
}
header {<|fim▁hole|> name = "remove s3 server"
ignore_if_set = "true"
}
force_destroy = true
}`, name, domain)
}
func testAccServiceV1HeadersConfig_update(name, domain string) string {
return fmt.Sprintf(`
resource "fastly_service_v1" "foo" {
name = "%s"
domain {
name = "%s"
comment = "tf-testing-domain"
}
backend {
address = "aws.amazon.com"
name = "amazon docs"
}
header {
destination = "http.x-amz-request-id"
type = "cache"
action = "delete"
name = "remove x-amz-request-id"
}
header {
destination = "http.Server"
type = "cache"
action = "delete"
name = "DESTROY S3"
}
header {
destination = "http.server-name"
type = "request"
action = "set"
source = "server.identity"
name = "Add server name"
}
force_destroy = true
}`, name, domain)
}<|fim▁end|> | destination = "http.Server"
type = "cache"
action = "delete" |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2017 Michal Čihař <[email protected]>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
from django.contrib.auth.models import User, Group, Permission
from django.core.exceptions import ValidationError
from django.core.management import call_command
from django.test import TestCase
from django.utils.encoding import force_text
from weblate.lang.models import Language
from weblate.trans.models import Project, Translation, Comment
from weblate.permissions.data import DEFAULT_GROUPS, ADMIN_PERMS
from weblate.permissions.models import AutoGroup, GroupACL
from weblate.permissions.helpers import (
has_group_perm, can_delete_comment, can_edit, can_author_translation,
)
from weblate.trans.tests.test_models import ModelTestCase
class PermissionsTest(TestCase):
def setUp(self):
self.user = User.objects.create_user(
'user', '[email protected]', 'x'
)
self.owner = User.objects.create_user(
'owner', '[email protected]', 'x'
)
self.project = Project.objects.create(slug='test')
self.project.add_user(self.owner, '@Administration')
def test_owner_owned(self):
self.assertTrue(
has_group_perm(
self.owner, 'trans.author_translation', project=self.project
)
)
def test_owner_no_perm(self):
self.assertFalse(
has_group_perm(
self.owner, 'trans.delete_project', project=self.project
)
)
def test_owner_user(self):
self.assertFalse(
has_group_perm(
self.user, 'trans.author_translation', project=self.project
)
)
def test_check_owner(self):
self.assertTrue(
has_group_perm(
self.owner, 'trans.author_translation', project=self.project
)
)
def test_check_user(self):
self.assertFalse(
has_group_perm(
self.user, 'trans.author_translation', project=self.project
)
)
def test_delete_comment_owner(self):
comment = Comment(project=self.project)
self.assertTrue(can_delete_comment(self.owner, comment))
def test_delete_comment_user(self):
comment = Comment(project=self.project)
self.assertFalse(can_delete_comment(self.user, comment))
def test_cache(self):
comment = Comment(project=self.project)
key = ('_can_delete_comment', self.project.get_full_slug())
self.assertTrue(not hasattr(self.user, 'acl_permissions_cache'))
self.assertFalse(can_delete_comment(self.user, comment))
self.assertFalse(self.user.acl_permissions_cache[key])
self.user.acl_permissions_cache[key] = True
self.assertTrue(can_delete_comment(self.user, comment))
def test_default_groups(self):
"""Check consistency of default permissions.
- The admin permissions have to contain all used permissions
"""
for group in DEFAULT_GROUPS:
self.assertEqual(
DEFAULT_GROUPS[group] - ADMIN_PERMS,
set()
)
class GroupACLTest(ModelTestCase):
PERMISSION = "trans.save_translation"
def setUp(self):
super(GroupACLTest, self).setUp()
self.user = User.objects.create_user(
"user", '[email protected]', 'x'
)
self.privileged = User.objects.create_user(
"privileged", '[email protected]', 'x'
)
self.group = Group.objects.create(name="testgroup")
self.project = self.subproject.project
self.subproject.translation_set.all().delete()
self.language = Language.objects.get_default()
self.trans = Translation.objects.create(
subproject=self.subproject, language=self.language,
filename="this/is/not/a.template"
)
app, perm = self.PERMISSION.split('.')
self.permission = Permission.objects.get(
codename=perm, content_type__app_label=app
)
self.group.permissions.add(self.permission)
self.privileged.groups.add(self.group)
def test_acl_lockout(self):
"""Basic sanity check.
Group ACL set on a subproject should only allow members of
the marked group to edit it.
"""
self.assertTrue(can_edit(self.user, self.trans, self.PERMISSION))
self.assertTrue(can_edit(self.privileged, self.trans, self.PERMISSION))
acl = GroupACL.objects.create(subproject=self.subproject)
acl.groups.add(self.group)
self.clear_permission_cache()
self.assertTrue(can_edit(self.privileged, self.trans, self.PERMISSION))
self.assertFalse(can_edit(self.user, self.trans, self.PERMISSION))
def test_acl_overlap(self):
"""ACL overlap test.
When two ACLs can apply to a translation object, only the most
specific one should apply.
"""
acl_lang = GroupACL.objects.create(language=self.language)
acl_lang.groups.add(self.group)
self.assertTrue(
can_edit(self.privileged, self.trans, self.PERMISSION))
acl_sub = GroupACL.objects.create(subproject=self.subproject)
self.clear_permission_cache()
self.assertFalse(
can_edit(self.privileged, self.trans, self.PERMISSION))
acl_sub.groups.add(self.group)
self.clear_permission_cache()
self.assertTrue(
can_edit(self.privileged, self.trans, self.PERMISSION))
def test_acl_str(self):
acl = GroupACL()
self.assertIn(
'unspecified', force_text(acl)
)
acl.language = self.language
self.assertIn(
'language=English', force_text(acl)
)
acl.subproject = self.subproject
self.assertIn(
'subproject=Test/Test', force_text(acl)
)
acl.subproject = None
acl.project = self.project
self.assertIn(
'project=Test', force_text(acl)
)
def test_acl_clean(self):
acl = GroupACL()
self.assertRaises(
ValidationError,
acl.clean
)
acl.project = self.project
acl.subproject = self.subproject
acl.save()
self.assertIsNone(acl.project)
def test_acl_project(self):
"""Basic sanity check for project-level actions.
When a Group ACL is set for a project, and only for a project,
it should apply to project-level actions on that project.
"""
acl = GroupACL.objects.get(project=self.project)
acl.groups.add(self.group)
permission = Permission.objects.get(
codename='author_translation', content_type__app_label='trans'
)
acl.permissions.add(permission)
self.group.permissions.add(permission)
self.assertFalse(
can_author_translation(self.user, self.project)
)
self.assertTrue(
can_author_translation(self.privileged, self.project)<|fim▁hole|> )
def test_affects_unrelated(self):
"""Unrelated objects test.
If I set an ACL on an object, it should not affect objects
that it doesn't match. (in this case, a different language)
"""
lang_cs = Language.objects.get(code='cs')
lang_de = Language.objects.get(code='de')
trans_cs = Translation.objects.create(
subproject=self.subproject, language=lang_cs,
filename="this/is/not/a.template"
)
trans_de = Translation.objects.create(
subproject=self.subproject, language=lang_de,
filename="this/is/not/a.template"
)
acl = GroupACL.objects.create(language=lang_cs)
acl.groups.add(self.group)
self.assertTrue(can_edit(self.privileged, trans_cs, self.PERMISSION))
self.assertFalse(can_edit(self.user, trans_cs, self.PERMISSION))
self.assertTrue(can_edit(self.privileged, trans_de, self.PERMISSION))
self.assertTrue(can_edit(self.user, trans_de, self.PERMISSION))
def test_affects_partial_match(self):
"""Partial ACL match test.
If I set an ACL on two criteria, e.g., subproject and language,
it should not affect objects that only match one of the criteria.
"""
lang_cs = Language.objects.get(code='cs')
lang_de = Language.objects.get(code='de')
trans_cs = Translation.objects.create(
subproject=self.subproject, language=lang_cs,
filename="this/is/not/a.template"
)
trans_de = Translation.objects.create(
subproject=self.subproject, language=lang_de,
filename="this/is/not/a.template"
)
acl = GroupACL.objects.create(
language=lang_cs,
subproject=self.subproject
)
acl.groups.add(self.group)
self.assertTrue(can_edit(self.privileged, trans_cs, self.PERMISSION))
self.assertFalse(can_edit(self.user, trans_cs, self.PERMISSION))
self.assertTrue(can_edit(self.privileged, trans_de, self.PERMISSION))
self.assertTrue(can_edit(self.user, trans_de, self.PERMISSION))
def clear_permission_cache(self):
"""Clear permission cache.
This is necessary when testing interaction of the built-in permissions
mechanism and Group ACL. The built-in mechanism will cache results
of `has_perm` and friends, but these can be affected by the Group ACL
lockout. Usually the cache will get cleared on every page request,
but here we need to do it manually.
"""
attribs = (
'_perm_cache',
'_user_perm_cache',
'_group_perm_cache',
'acl_permissions_cache',
'acl_permissions_owner',
'acl_permissions_groups',
)
for cache in attribs:
for user in (self.user, self.privileged):
if hasattr(user, cache):
delattr(user, cache)
def test_group_locked(self):
"""Limited privilege test.
Once a group is used in a GroupACL, it is said to be "locked".
Privileges from the locked group should not apply outside GroupACL.
I.e., if I gain "author_translation" privilege through membership
in a "privileged_group", applicable to Czech language, this should
not apply to any other language.
"""
lang_cs = Language.objects.get(code='cs')
lang_de = Language.objects.get(code='de')
trans_cs = Translation.objects.create(
subproject=self.subproject, language=lang_cs,
filename="this/is/not/a.template"
)
trans_de = Translation.objects.create(
subproject=self.subproject, language=lang_de,
filename="this/is/not/a.template"
)
perm_name = 'trans.author_translation'
permission = Permission.objects.get(
codename='author_translation', content_type__app_label='trans'
)
# Avoid conflict with automatic GroupACL
self.project.groupacl_set.all()[0].permissions.remove(permission)
self.assertFalse(can_edit(self.user, trans_cs, perm_name))
self.assertFalse(can_edit(self.privileged, trans_cs, perm_name))
self.assertFalse(can_edit(self.privileged, trans_de, perm_name))
self.clear_permission_cache()
self.group.permissions.add(permission)
self.assertFalse(can_edit(self.user, trans_cs, perm_name))
self.assertTrue(can_edit(self.privileged, trans_cs, perm_name))
self.assertTrue(can_edit(self.privileged, trans_de, perm_name))
self.clear_permission_cache()
acl = GroupACL.objects.create(language=lang_cs)
acl.groups.add(self.group)
self.assertTrue(can_edit(self.privileged, trans_cs, perm_name))
self.assertFalse(can_edit(self.privileged, trans_de, perm_name))
def test_project_specific(self):
"""Project specificity test.
Project-level actions should only be affected by Group ACLs that
are specific to the project, and don't have other criteria.
E.g., if a GroupACL lists project+language, this should not give
you project-level permissions.
"""
permission = Permission.objects.get(
codename='author_translation', content_type__app_label='trans'
)
self.group.permissions.add(permission)
acl_project_lang = GroupACL.objects.create(
language=self.language,
project=self.project
)
acl_project_lang.groups.add(self.group)
self.assertFalse(has_group_perm(
self.privileged, 'trans.author_translation', project=self.project
))
acl_project_only = GroupACL.objects.get(
language=None,
project=self.project,
)
acl_project_only.groups.add(self.group)
self.clear_permission_cache()
self.assertTrue(has_group_perm(
self.privileged, 'trans.author_translation', project=self.project
))
def test_acl_not_filtered(self):
"""Basic sanity check.
Group ACL set on a subproject should only allow members of
the marked group to edit it.
"""
self.assertTrue(can_edit(self.user, self.trans, self.PERMISSION))
self.assertTrue(can_edit(self.privileged, self.trans, self.PERMISSION))
acl = GroupACL.objects.create(subproject=self.subproject)
acl.groups.add(self.group)
acl.permissions.remove(self.permission)
self.clear_permission_cache()
self.assertTrue(can_edit(self.privileged, self.trans, self.PERMISSION))
self.assertTrue(can_edit(self.user, self.trans, self.PERMISSION))
class AutoGroupTest(TestCase):
@staticmethod
def create_user():
return User.objects.create_user('test1', '[email protected]', 'pass')
def test_default(self):
user = self.create_user()
self.assertEqual(user.groups.count(), 1)
def test_none(self):
AutoGroup.objects.all().delete()
user = self.create_user()
self.assertEqual(user.groups.count(), 0)
def test_matching(self):
AutoGroup.objects.create(
match='^.*@weblate.org',
group=Group.objects.get(name='Guests')
)
user = self.create_user()
self.assertEqual(user.groups.count(), 2)
def test_nonmatching(self):
AutoGroup.objects.create(
match='^.*@example.net',
group=Group.objects.get(name='Guests')
)
user = self.create_user()
self.assertEqual(user.groups.count(), 1)
class CommandTest(TestCase):
"""Test for management commands."""
def test_setupgroups(self):
call_command('setupgroups')
group = Group.objects.get(name='Users')
self.assertTrue(
group.permissions.filter(
codename='save_translation'
).exists()
)
call_command('setupgroups', move=True)<|fim▁end|> | |
<|file_name|>invalidForInContinueStatements.js<|end_file_name|><|fim▁begin|>continue;
ONE:
for (var x in {})
continue TWO;
TWO:
for (var x in {}) {
var fn = function () {
continue TWO;
};
}
THREE:
for (var x in {}) {
var fn = function () {
continue THREE;
};
}
<|fim▁hole|> for (var x in {}) {
}
}
NINE:
var y = 12;
for (var x in {}) {
continue NINE;
}<|fim▁end|> |
for (var x in {}) {
continue FIVE;
FIVE:
|
<|file_name|>singleton.js<|end_file_name|><|fim▁begin|>/**
* simple
*/
var mysinleton = {
property1 : 'something',
property2 : 'something else',
method1 : function() {
console.log("hello world");
}
}
/***
* encapsulation like java get、set
*/
var mysinleton2 = function() {
var privateVariable = 'something private';
function showPrivate() {
console.log(privateVariable);
}
return {
publicMethod : function(){
showPrivate();
},
publicVar : "this is a public variable"
}
}
var mysinletonInstance = new mysinleton2();
mysinletonInstance.publicMethod();
console.log(mysinletonInstance.publicVar);
/***
*
* singleton realize
*/
var Mysingleton = (function(){
var _intstance ;
function getInstance() {
return _intstance;
};
function init() {
return {
publicMethod : function() {
console.log("this is public method");
},
publicProperty : 'test'
}
};
return {
getInstance : function() {
if(_intstance) {
return _intstance;
}else {
_intstance = init();
return _intstance;
}
}
}
})();
Mysingleton.getInstance().publicMethod();
var SingletonTester = (function () {
//参数:传递给单例的一个参数集合
function Singleton(args) {
//设置args变量为接收的参数或者为空(如果没有提供的话)
var args = args || {};
//设置name参数
this.name = 'SingletonTester';
//设置pointX的值
this.pointX = args.pointX || 6; //从接收的参数里获取,或者设置为默认值
//设置pointY的值
this.pointY = args.pointY || 10;
}
//实例容器
var instance;
var _static = {
name: 'SingletonTester',
//获取实例的方法
//返回Singleton的实例
getInstance: function (args) {
if (instance === undefined) {
instance = new Singleton(args);
}
return instance;
<|fim▁hole|>})();
var singletonTest = SingletonTester.getInstance({ pointX: 5 });
console.log(singletonTest.pointX); // 输出 5<|fim▁end|> | }
};
return _static;
|
<|file_name|>prism-line-highlight.js<|end_file_name|><|fim▁begin|>(function(){
if(!window.Prism) {
return;
}
function $$(expr, con) {
return Array.prototype.slice.call((con || document).querySelectorAll(expr));
}
var CRLF = crlf = /\r?\n|\r/g;
<|fim▁hole|> var lineHeight = parseFloat(getComputedStyle(pre).lineHeight);
for (var i=0, range; range = ranges[i++];) {
range = range.split('-');
var start = +range[0],
end = +range[1] || start;
var line = document.createElement('div');
line.textContent = Array(end - start + 2).join(' \r\n');
line.className = (classes || '') + ' line-highlight';
line.setAttribute('data-start', start);
if(end > start) {
line.setAttribute('data-end', end);
}
line.style.top = (start - offset - 1) * lineHeight + 'px';
(pre.querySelector('code') || pre).appendChild(line);
}
}
function applyHash() {
var hash = location.hash.slice(1);
// Remove pre-existing temporary lines
$$('.temporary.line-highlight').forEach(function (line) {
line.parentNode.removeChild(line);
});
var range = (hash.match(/\.([\d,-]+)$/) || [,''])[1];
if (!range || document.getElementById(hash)) {
return;
}
var id = hash.slice(0, hash.lastIndexOf('.')),
pre = document.getElementById(id);
if (!pre) {
return;
}
if (!pre.hasAttribute('data-line')) {
pre.setAttribute('data-line', '');
}
highlightLines(pre, range, 'temporary ');
document.querySelector('.temporary.line-highlight').scrollIntoView();
}
var fakeTimer = 0; // Hack to limit the number of times applyHash() runs
Prism.hooks.add('after-highlight', function(env) {
var pre = env.element.parentNode;
var lines = pre && pre.getAttribute('data-line');
if (!pre || !lines || !/pre/i.test(pre.nodeName)) {
return;
}
clearTimeout(fakeTimer);
$$('.line-highlight', pre).forEach(function (line) {
line.parentNode.removeChild(line);
});
highlightLines(pre, lines);
fakeTimer = setTimeout(applyHash, 1);
});
addEventListener('hashchange', applyHash);
})();<|fim▁end|> | function highlightLines(pre, lines, classes) {
var ranges = lines.replace(/\s+/g, '').split(','),
offset = +pre.getAttribute('data-line-offset') || 0;
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import os
from subprocess import PIPE, Popen
import sys
from django.utils.encoding import force_text, DEFAULT_LOCALE_ENCODING
from django.utils import six
from .base import CommandError
def popen_wrapper(args, os_err_exc_type=CommandError):
"""
Friendly wrapper around Popen.
Returns stdout output, stderr output and OS status code.
"""
try:
p = Popen(args, shell=False, stdout=PIPE, stderr=PIPE,
close_fds=os.name != 'nt', universal_newlines=True)
except OSError as e:
strerror = force_text(e.strerror, DEFAULT_LOCALE_ENCODING,
strings_only=True)
six.reraise(os_err_exc_type, os_err_exc_type('Error executing %s: %s' %<|fim▁hole|> force_text(errors, DEFAULT_LOCALE_ENCODING, strings_only=True),
p.returncode
)
def handle_extensions(extensions=('html',), ignored=('py',)):
"""
Organizes multiple extensions that are separated with commas or passed by
using --extension/-e multiple times. Note that the .py extension is ignored
here because of the way non-*.py files are handled in make_messages() (they
are copied to file.ext.py files to trick xgettext to parse them as Python
files).
For example: running 'django-admin makemessages -e js,txt -e xhtml -a'
would result in an extension list: ['.js', '.txt', '.xhtml']
>>> handle_extensions(['.html', 'html,js,py,py,py,.py', 'py,.py'])
{'.html', '.js'}
>>> handle_extensions(['.html, txt,.tpl'])
{'.html', '.tpl', '.txt'}
"""
ext_list = []
for ext in extensions:
ext_list.extend(ext.replace(' ', '').split(','))
for i, ext in enumerate(ext_list):
if not ext.startswith('.'):
ext_list[i] = '.%s' % ext_list[i]
return set(x for x in ext_list if x.strip('.') not in ignored)
def find_command(cmd, path=None, pathext=None):
if path is None:
path = os.environ.get('PATH', '').split(os.pathsep)
if isinstance(path, six.string_types):
path = [path]
# check if there are funny path extensions for executables, e.g. Windows
if pathext is None:
pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD').split(os.pathsep)
# don't use extensions if the command ends with one of them
for ext in pathext:
if cmd.endswith(ext):
pathext = ['']
break
# check if we find the command on PATH
for p in path:
f = os.path.join(p, cmd)
if os.path.isfile(f):
return f
for ext in pathext:
fext = f + ext
if os.path.isfile(fext):
return fext
return None<|fim▁end|> | (args[0], strerror)), sys.exc_info()[2])
output, errors = p.communicate()
return (
output, |
<|file_name|>attribute_frontend.go<|end_file_name|><|fim▁begin|>// Copyright 2015, Cyrill @ Schumacher.fm and the CoreStore contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package eav
type (
// AttributeFrontendModeller defines the attribute frontend model @todo
// @see magento2/site/app/code/Magento/Eav/Model/Entity/Attribute/Frontend/AbstractFrontend.php
AttributeFrontendModeller interface {
InputRenderer() FrontendInputRendererIFace
GetValue()
GetInputType() string
// @todo
// Config to configure the current instance
Config(...AttributeFrontendConfig) AttributeFrontendModeller
}
// FrontendInputRendererIFace see table catalog_eav_attribute.frontend_input_renderer @todo
// Stupid name :-( Fix later.
FrontendInputRendererIFace interface {
// TBD()
}
// AttributeFrontend implements abstract functions @todo
// @see magento2/site/app/code/Magento/Eav/Model/Entity/Attribute/Backend/AbstractBackend.php
AttributeFrontend struct {
// a is the reference to the already created attribute during init() call in a package.
// Do not modify a here
a *Attribute
// idx references to the generated constant and therefore references to itself. mainly used in
// backend|source|frontend|etc_model
idx AttributeIndex
}
AttributeFrontendConfig func(*AttributeFrontend)
)
var _ AttributeFrontendModeller = (*AttributeFrontend)(nil)
// NewAttributeFrontend creates a pointer to a new attribute source
func NewAttributeFrontend(cfgs ...AttributeFrontendConfig) *AttributeFrontend {
as := &AttributeFrontend{
a: nil,
}
as.Config(cfgs...)
return as
}
// AttributeFrontendIdx only used in generated code to set the current index in the attribute slice
func AttributeFrontendIdx(i AttributeIndex) AttributeFrontendConfig {
return func(as *AttributeFrontend) {
as.idx = i
}
}
// Config runs the configuration functions
func (af *AttributeFrontend) Config(configs ...AttributeFrontendConfig) AttributeFrontendModeller {
for _, cfg := range configs {
cfg(af)
}
return af
}
func (af *AttributeFrontend) InputRenderer() FrontendInputRendererIFace { return nil }<|fim▁hole|>func (af *AttributeFrontend) GetValue() {}
func (af *AttributeFrontend) GetInputType() string {
return af.a.FrontendInput()
}<|fim▁end|> | |
<|file_name|>elo.rs<|end_file_name|><|fim▁begin|>// This file was taken from the rust-elo project since it's not a published
// Cargo crate. Check out https://github.com/CarlColglazier/rust-elo :]
// disregard dead code since this is an API
#![allow(dead_code)]
/// Elo.
pub trait Elo {
/// Get the rating.
fn get_rating(&self) -> f32;
/// Set the rating.
fn change_rating(&mut self, rating: f32);
}
fn expected_rating<T: Elo>(player_one: &T, player_two: &T) -> f32 {
return 1.0f32 / (1.0f32 + 10f32.powf(
(player_two.get_rating() - player_one.get_rating()) / 400f32
));
}
/// EloRanking.
pub struct EloRanking {
k_factor: usize,
}
impl EloRanking {
/// Create a new Elo ranking system.
///
/// # Example
///
/// ```
/// # use elo::EloRanking;
/// let k_factor: usize = 32;
/// let elo_ranking = EloRanking::new(k_factor);
/// ```
pub fn new(k: usize) -> EloRanking {
return EloRanking {
k_factor: k,
}
}
/// Change the K factor.
///
/// # Example
///
/// ```
/// # use elo::EloRanking;
/// # let mut elo_ranking = EloRanking::new(32);
/// elo_ranking.set_k_factor(25);
/// ```
pub fn set_k_factor(&mut self, k: usize) {
self.k_factor = k;
}
/// Returns the K factor.
///
/// # Example
///
/// ```
/// # use elo::EloRanking;
/// # let elo_ranking = EloRanking::new(32);
/// assert_eq!(32, elo_ranking.get_k_factor());
/// ```
pub fn get_k_factor(&self) -> usize {
return self.k_factor;
}
/// Internal method for generic calculations.
fn calculate_rating<T: Elo>(&self,
player_one: &mut T,
player_two: &mut T,
score: f32) {
let change = self.k_factor as f32 *
(score - expected_rating::<T>(player_one, player_two));
player_one.change_rating(change);
player_two.change_rating(-change);
}
pub fn win<T: Elo>(&self, winner: &mut T, loser: &mut T) {
self.calculate_rating(winner, loser, 1.0);
}
pub fn tie<T: Elo>(&self, player_one: &mut T, player_two: &mut T) {
self.calculate_rating(player_one, player_two, 0.5);
}
pub fn loss<T: Elo>(&self, loser: &mut T, winner: &mut T) {
self.win::<T>(winner, loser);
}
}
#[cfg(test)]
mod tests {
use super::*;
struct RatingObject {
rating: f32,
}
impl RatingObject {
pub fn new() -> RatingObject {
return RatingObject {
rating: 1400f32,
};
}
}
impl Elo for RatingObject {
fn get_rating(&self) -> f32 {
return self.rating;
}
fn change_rating(&mut self, rating: f32) {
self.rating += rating;
}
}
#[test]
fn rating() {
let mut player_one = RatingObject::new();
let mut player_two = RatingObject::new();
let rating_system = EloRanking::new(32);
assert_eq!(1400f32, player_one.get_rating());
assert_eq!(1400f32, player_two.get_rating());
player_one.change_rating(100f32);
assert_eq!(1500f32, player_one.get_rating());
player_one.change_rating(-100f32);
assert_eq!(1400f32, player_one.get_rating());
// In a tie, the ratings should stay the same.
rating_system.tie::<RatingObject>(&mut player_one, &mut player_two);
assert_eq!(1400f32, player_one.get_rating());<|fim▁hole|> assert_eq!(1384f32, player_two.get_rating());
// With a loss, this should reset to normal.
rating_system.loss::<RatingObject>(&mut player_one, &mut player_two);
assert_eq!(1398.5305f32, player_one.get_rating());
assert_eq!(1401.4695f32, player_two.get_rating());
}
}<|fim▁end|> | assert_eq!(1400f32, player_two.get_rating());
// With a win, player_one should gain an advantage.
rating_system.win::<RatingObject>(&mut player_one, &mut player_two);
assert_eq!(1416f32, player_one.get_rating()); |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2013 Christopher L. Felton
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
try:
from setuptools import setup
from setuptools import find_packages
except ImportError:
from distutils.core import setup
from pkgutil import walk_packages
import mn
# many pypy installs don't have setuptools (?)
def _find_packages(path='.', prefix=''):
yield prefix
prefix = prefix + "."
for _, name, ispkg in walk_packages(path,
prefix,
onerror=lambda x: x):
if ispkg:
yield name
def find_packages():
return list(_find_packages(mn.__path__, mn.__name__))
setup(name = "minnesota",
version = "0.1pre",
description = "collection of HDL cores ",
license = "LGPL",
platforms = ["Any"],
keywords = "DSP HDL MyHDL FPGA FX2 USB",
<|fim▁hole|>
packages = find_packages(),
# @todo need to add the examples and test directories,
# copy it over ...
)<|fim▁end|> | |
<|file_name|>resource_loader.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software<|fim▁hole|># ==============================================================================
"""Resource management library."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os as _os
import sys as _sys
from tensorflow.python.util import tf_inspect as _inspect
from tensorflow.python.util.tf_export import tf_export
# pylint: disable=g-import-not-at-top
try:
from rules_python.python.runfiles import runfiles
except ImportError:
runfiles = None
# pylint: enable=g-import-not-at-top
@tf_export(v1=['resource_loader.load_resource'])
def load_resource(path):
"""Load the resource at given path, where path is relative to tensorflow/.
Args:
path: a string resource path relative to tensorflow/.
Returns:
The contents of that resource.
Raises:
IOError: If the path is not found, or the resource can't be opened.
"""
with open(get_path_to_datafile(path), 'rb') as f:
return f.read()
# pylint: disable=protected-access
@tf_export(v1=['resource_loader.get_data_files_path'])
def get_data_files_path():
"""Get a direct path to the data files colocated with the script.
Returns:
The directory where files specified in data attribute of py_test
and py_binary are stored.
"""
return _os.path.dirname(_inspect.getfile(_sys._getframe(1)))
@tf_export(v1=['resource_loader.get_root_dir_with_all_resources'])
def get_root_dir_with_all_resources():
"""Get a root directory containing all the data attributes in the build rule.
Returns:
The path to the specified file present in the data attribute of py_test
or py_binary. Falls back to returning the same as get_data_files_path if it
fails to detect a bazel runfiles directory.
"""
script_dir = get_data_files_path()
# Create a history of the paths, because the data files are located relative
# to the repository root directory, which is directly under runfiles
# directory.
directories = [script_dir]
data_files_dir = ''
while True:
candidate_dir = directories[-1]
current_directory = _os.path.basename(candidate_dir)
if '.runfiles' in current_directory:
# Our file should never be directly under runfiles.
# If the history has only one item, it means we are directly inside the
# runfiles directory, something is wrong, fall back to the default return
# value, script directory.
if len(directories) > 1:
data_files_dir = directories[-2]
break
else:
new_candidate_dir = _os.path.dirname(candidate_dir)
# If we are at the root directory these two will be the same.
if new_candidate_dir == candidate_dir:
break
else:
directories.append(new_candidate_dir)
return data_files_dir or script_dir
@tf_export(v1=['resource_loader.get_path_to_datafile'])
def get_path_to_datafile(path):
"""Get the path to the specified file in the data dependencies.
The path is relative to tensorflow/
Args:
path: a string resource path relative to tensorflow/
Returns:
The path to the specified file present in the data attribute of py_test
or py_binary.
Raises:
IOError: If the path is not found, or the resource can't be opened.
"""
# First, try finding in the new path.
if runfiles:
r = runfiles.Create()
new_fpath = r.Rlocation(
_os.path.abspath(_os.path.join('tensorflow', path)))
if new_fpath is not None and _os.path.exists(new_fpath):
return new_fpath
# Then, the old style path, as people became dependent on this buggy call.
old_filepath = _os.path.join(
_os.path.dirname(_inspect.getfile(_sys._getframe(1))), path)
return old_filepath
@tf_export(v1=['resource_loader.readahead_file_path'])
def readahead_file_path(path, readahead='128M'): # pylint: disable=unused-argument
"""Readahead files not implemented; simply returns given path."""
return path<|fim▁end|> | # distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. |
<|file_name|>pic_carver.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
@author : 'Muhammad Arslan <[email protected]>'
"""
import re
import zlib
import cv2
from scapy.all import *
pics = "pictues"
faces_dir = "faces"
pcap_file = "bhp.pcap"
def get_http_headers(http_payload):
try:
headers_raw = http_payload[:http_payload.index("\r\n\r\n")+2]
headers = dict(re.findall(r"(?P<'name>.*?): (?P<value>.*?)\r\n", headers_raw))
except:
return None
def extract_images(headers, http_payload):
image = None
image_type = None
try:
if "image" in headers['Content-Type']:
image_type = headers['Content-Type'].split('/')[1]
image = http_payload[http_payload.index('\r\n\r\n') + 4:]
try:
if "Content-Encoding" in headers.keys():
if headers['Content-Encoding'] == 'gzip':
image = zlib.decompress(image, 16+zlib.MAX_WBITS)
elif headers['Content-Encoding'] == "deflate":
image = zlib.decompress(image)
except:
pass
except:
return None, None
return image, image_type
def face_detect(path, filename):
img = cv2.imread(path)
cascade = cv2.CascadeClassifier("haarcascade_frontalface_alt.xml")
rects = cascade.detectMultiScale(img, 1.3, 4, cv2.cv.CV_HAAR_SCALE_IMAGE, (20, 20))
if len(rects) == 0:
return False
rects[:, 2:] += rects[:, :2]<|fim▁hole|>
cv2.imwrite("%s/$s-%s" % (faces_dir, pcap_file, filename), img)
return True
def http_assembler(pcap_file):
carved_images = 0
faces_detected = 0
a = rdpcap(pcap_file)
sessions = a.sessions()
for session in sessions:
http_payload = ""
for packet in sessions[session]:
try:
if packet[TCP].dport == 80 or packet[TCP].sport == 80:
http_payload += str(packet[TCP].payload)
except:
pass
headers = get_http_headers(http_payload)
if headers is None:
continue
image, image_type = extract_image(headers, http_payload)
if image is not None and image_type is not None:
file_name = "%s-pic_carver_%d.%s" % (pcap_file, carved_images, image_type)
with open("%s/%s" % (pics, file_name), "wb") as fd:
fd.write(image)
carved_images += 1
try:
result = face_detect("%s/%s" % (pics, file_name), file_name)
if result is True:
faces_detected += 1
except:
pass
return carved_images, faces_detected
carved_images, faces_detected = http_assembler(pcap_file)
print "Extracted: %d images" % carved_images
print "Detected: %d faces" % faces_detected<|fim▁end|> |
for x1, y1, x2, y2 in rects:
cv2.rectangle(img, (x1, y1), (x2, y2), (127, 255, 0), 2) |
<|file_name|>issue-50061.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(path_statements)]<|fim▁hole|>extern crate issue_50061;
macro inner(any_token $v: tt) {
$v
}
macro outer($v: tt) {
inner!(any_token $v)
}
#[issue_50061::check]
fn main() {
//! this doc comment forces roundtrip through a string
let checkit = 0;
outer!(checkit);
}<|fim▁end|> | // aux-build:issue-50061.rs
#![feature(decl_macro)]
|
<|file_name|>playerwrapper.js<|end_file_name|><|fim▁begin|>var PlayerWrapper = function() {
this.underlyingPlayer = 'aurora';
this.aurora = {};
this.sm2 = {};
this.duration = 0;
this.volume = 100;
return this;
};
PlayerWrapper.prototype = _.extend({}, OC.Backbone.Events);
PlayerWrapper.prototype.play = function() {
switch(this.underlyingPlayer) {
case 'sm2':
this.sm2.play('ownCloudSound');
break;
case 'aurora':
this.aurora.play();
break;
}
};
PlayerWrapper.prototype.stop = function() {
switch(this.underlyingPlayer) {
case 'sm2':
this.sm2.stop('ownCloudSound');
this.sm2.destroySound('ownCloudSound');
break;
case 'aurora':
if(this.aurora.asset !== undefined) {
// check if player's constructor has been called,
// if so, stop() will be available
this.aurora.stop();
}
break;
}
};
PlayerWrapper.prototype.togglePlayback = function() {
switch(this.underlyingPlayer) {
case 'sm2':
this.sm2.togglePause('ownCloudSound');
break;
case 'aurora':
this.aurora.togglePlayback();
break;
}
};
PlayerWrapper.prototype.seekingSupported = function() {
// Seeking is not implemented in aurora/flac.js and does not work on all
// files with aurora/mp3.js. Hence, we disable seeking with aurora.
return this.underlyingPlayer == 'sm2';
};
PlayerWrapper.prototype.pause = function() {
switch(this.underlyingPlayer) {
case 'sm2':
this.sm2.pause('ownCloudSound');
break;
case 'aurora':
this.aurora.pause();
break;
}
};
PlayerWrapper.prototype.seek = function(percentage) {
if (this.seekingSupported()) {
console.log('seek to '+percentage);
switch(this.underlyingPlayer) {
case 'sm2':
this.sm2.setPosition('ownCloudSound', percentage * this.duration);
break;
case 'aurora':
this.aurora.seek(percentage * this.duration);
break;
}
}
else {
console.log('seeking is not supported for this file');
}
};
PlayerWrapper.prototype.setVolume = function(percentage) {
this.volume = percentage;
switch(this.underlyingPlayer) {
case 'sm2':
this.sm2.setVolume('ownCloudSound', this.volume);
break;
case 'aurora':
this.aurora.volume = this.volume;
break;
}
};
PlayerWrapper.prototype.fromURL = function(typeAndURL) {
var self = this;
var url = typeAndURL['url'];
var type = typeAndURL['type'];
if (soundManager.canPlayURL(url)) {
this.underlyingPlayer = 'sm2';
} else {
this.underlyingPlayer = 'aurora';
}
console.log('Using ' + this.underlyingPlayer + ' for type ' + type + ' URL ' + url);
switch(this.underlyingPlayer) {
case 'sm2':
this.sm2 = soundManager.setup({
html5PollingInterval: 200
});
this.sm2.html5Only = true;
this.sm2.createSound({
id: 'ownCloudSound',
url: url,
whileplaying: function() {
self.trigger('progress', this.position);
},
whileloading: function() {
self.duration = this.durationEstimate;
self.trigger('duration', this.durationEstimate);
// The buffer may contain holes after seeking but just ignore those.
// Show the buffering status according the last buffered position.
var bufCount = this.buffered.length;
var bufEnd = (bufCount > 0) ? this.buffered[bufCount-1].end : 0;
self.trigger('buffer', bufEnd / this.durationEstimate * 100);
},
onfinish: function() {
self.trigger('end');
},
onload: function(success) {
if (success) {
self.trigger('ready');
} else {
console.log('SM2: sound load error');
}
}
});
break;
case 'aurora':<|fim▁hole|>
this.aurora.on('buffer', function(percent) {
self.trigger('buffer', percent);
});
this.aurora.on('progress', function(currentTime) {//currentTime is in msec
self.trigger('progress', currentTime);
});
this.aurora.on('ready', function() {
self.trigger('ready');
});
this.aurora.on('end', function() {
self.trigger('end');
});
this.aurora.on('duration', function(msecs) {
self.duration = msecs;
self.trigger('duration', msecs);
});
break;
}
// Set the current volume to the newly created player instance
this.setVolume(this.volume);
};
PlayerWrapper.prototype.setVolume = function(vol) {
console.log("setVolume not implemented");// TODO
};<|fim▁end|> | this.aurora = AV.Player.fromURL(url);
this.aurora.asset.source.chunkSize=524288; |
<|file_name|>quotemention.js<|end_file_name|><|fim▁begin|>/*global document, window, gdn, jQuery*/
jQuery(($) => {
// Check if an element's top is visible in the viewport.
function inview(target) {
target = $(target);
return target.length && target.offset().top > window.pageYOffset;
}
// Find the previous comment of the mentioned user in this discussion.
function get(mention) {
// Extract the CommentID or DiscussionID from the parent item.
const commentID = mention.closest('.Item')[0].id.replace(/\D+/, '');
// Extract the name of the mentioned user.
const username = mention[0].innerHTML.replace(/^@"?(.*?)"?$/, '$1');
return $.getJSON(gdn.url(
'plugin/quotemention' +
'/' + gdn.definition('DiscussionID') +
'/' + commentID +
'/' + encodeURIComponent(username)
));
}
// mouseenter handler: Show a tooltip and/or highlight a post.
function show({currentTarget}) {
const mention = $(currentTarget)
// Keep track of the hover state manually for the "done" callback.
.data('mouseOver', '1');
const loaded = mention.data('quoteMention');
const showProgress = gdn.definition('quoteMention.showProgress', true);
let target;
if (loaded !== undefined) {<|fim▁hole|> // If the mouse is still over the element, highlight the referenced post.
if (mention.data('mouseOver')) {
target = $(data.target).addClass('mentionHighlight');
// Hide the tooltip if the target post is visible.
mention.tooltipster(inview(target) ? 'hide' : 'show');
}
mention
// Replace the content with the actual post.
.tooltipster('content', data.html)
// Save the target for highlighting.
.data('quoteMention', data.target);
})
.fail(() => {
// No post found or request failed: Remove the tooltip.
mention
.tooltipster('disable')
// Prevent further requests.
.data('quoteMention', false);
});
}
// Show the tooltip if it is loading or if the post is not fully visible.
if ((!loaded && showProgress) || (loaded && !inview(target))) {
mention.tooltipster('show');
}
}
// mouseleave handler: Hide a tooltip.
function hide({currentTarget}) {
const mention = $(currentTarget)
.tooltipster('hide')
.data('mouseOver', '');
$(mention.data('quoteMention')).removeClass('mentionHighlight');
}
// Register event handlers for all mentions on the page.
function init() {
const maxWidth = gdn.definition('quoteMention.maxWidth', 350);
const position = gdn.definition('quoteMention.position', 'bottom');
// Initialized mentions get the "quoteMention" class.
$('.ItemComment .Message a:not(.quoteMention)')
// Only grab links that start with an @.
.filter((ignore, {innerHTML}) => innerHTML.substring(0, 1) === '@')
.addClass('quoteMention')
// Initialize the tooltip with the progress animation.
.tooltipster({
content: '<span class="Progress"/>',
contentAsHTML: true,
trigger: 'custom',
position,
speed: 0,
updateAnimation: false,
theme: 'tooltipster-vanilla',
maxWidth
})
.hover(show, hide);
}
// Search for new mentions when comments are added or changed.
$(document).on('CommentAdded CommentEditingComplete CommentPagingComplete', init);
init();
});<|fim▁end|> | target = $(loaded).addClass('mentionHighlight');
} else {
get(mention)
.done((data) => { |
<|file_name|>objs_graphics.py<|end_file_name|><|fim▁begin|>from gi.repository import Gtk
import os
class ExportDialog(Gtk.Dialog):
def __init__(self,parent,*args):
Gtk.Dialog.__init__(self, "Exportieren", parent, 0,
(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
Gtk.STOCK_OK, Gtk.ResponseType.OK))
self.set_default_size(150,150)
self.contentarea=self.get_content_area()
self.selection_type=""
self.selection_folder=""
self.combo_store=Gtk.ListStore(str)
self.combo_store.append(["CSV"])
self.combo_store.append(["XML"])
self.combo=Gtk.ComboBox.new_with_model_and_entry(self.combo_store)
self.combo.connect("changed",self.update_select_type)<|fim▁hole|> self.filechooser = Gtk.FileChooserButton(Gtk.FileChooserAction.CREATE_FOLDER)
self.filechooser.set_create_folders(True)
self.filechooser.set_action(Gtk.FileChooserAction.SELECT_FOLDER)
self.filechooser.connect("file-set",self.update_select_folder)
self.contentarea.add(self.filechooser)
self.show_all()
def update_select_type(self,combo,*args):
treit=combo.get_active_iter()
if(treit == None):
return
self.selection_type=combo.get_model()[treit][0]
return
def update_select_folder(self,chooser,*args):
self.selection_folder=chooser.get_filename()<|fim▁end|> | self.combo.set_entry_text_column(0)
self.contentarea.add(self.combo)
|
<|file_name|>logging_metrics_pb2_grpc.py<|end_file_name|><|fim▁begin|># Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
import google.cloud.proto.logging.v2.logging_metrics_pb2 as google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2
import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2
class MetricsServiceV2Stub(object):
"""Service for configuring logs-based metrics.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.ListLogMetrics = channel.unary_unary(
'/google.logging.v2.MetricsServiceV2/ListLogMetrics',
request_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.ListLogMetricsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.ListLogMetricsResponse.FromString,
)
self.GetLogMetric = channel.unary_unary(
'/google.logging.v2.MetricsServiceV2/GetLogMetric',
request_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.GetLogMetricRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.LogMetric.FromString,
)
self.CreateLogMetric = channel.unary_unary(
'/google.logging.v2.MetricsServiceV2/CreateLogMetric',
request_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.CreateLogMetricRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.LogMetric.FromString,
)
self.UpdateLogMetric = channel.unary_unary(<|fim▁hole|> self.DeleteLogMetric = channel.unary_unary(
'/google.logging.v2.MetricsServiceV2/DeleteLogMetric',
request_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.DeleteLogMetricRequest.SerializeToString,
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
)
class MetricsServiceV2Servicer(object):
"""Service for configuring logs-based metrics.
"""
def ListLogMetrics(self, request, context):
"""Lists logs-based metrics.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetLogMetric(self, request, context):
"""Gets a logs-based metric.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateLogMetric(self, request, context):
"""Creates a logs-based metric.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UpdateLogMetric(self, request, context):
"""Creates or updates a logs-based metric.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteLogMetric(self, request, context):
"""Deletes a logs-based metric.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_MetricsServiceV2Servicer_to_server(servicer, server):
rpc_method_handlers = {
'ListLogMetrics': grpc.unary_unary_rpc_method_handler(
servicer.ListLogMetrics,
request_deserializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.ListLogMetricsRequest.FromString,
response_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.ListLogMetricsResponse.SerializeToString,
),
'GetLogMetric': grpc.unary_unary_rpc_method_handler(
servicer.GetLogMetric,
request_deserializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.GetLogMetricRequest.FromString,
response_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.LogMetric.SerializeToString,
),
'CreateLogMetric': grpc.unary_unary_rpc_method_handler(
servicer.CreateLogMetric,
request_deserializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.CreateLogMetricRequest.FromString,
response_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.LogMetric.SerializeToString,
),
'UpdateLogMetric': grpc.unary_unary_rpc_method_handler(
servicer.UpdateLogMetric,
request_deserializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.UpdateLogMetricRequest.FromString,
response_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.LogMetric.SerializeToString,
),
'DeleteLogMetric': grpc.unary_unary_rpc_method_handler(
servicer.DeleteLogMetric,
request_deserializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.DeleteLogMetricRequest.FromString,
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'google.logging.v2.MetricsServiceV2', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))<|fim▁end|> | '/google.logging.v2.MetricsServiceV2/UpdateLogMetric',
request_serializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.UpdateLogMetricRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_proto_dot_logging_dot_v2_dot_logging__metrics__pb2.LogMetric.FromString,
) |
<|file_name|>transmute-equal-assoc-types.rs<|end_file_name|><|fim▁begin|>trait Foo {
type Bar;<|fim▁hole|>}
unsafe fn noop<F: Foo>(foo: F::Bar) -> F::Bar {
::std::mem::transmute(foo) //~ ERROR cannot transmute between types of different sizes
}
fn main() {}<|fim▁end|> | |
<|file_name|>inject.js<|end_file_name|><|fim▁begin|>/* Copyright (C) 2016 R&D Solutions Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
var path = require('path');
var gulp = require('gulp');
var conf = require('./conf');
var $ = require('gulp-load-plugins')();
var wiredep = require('wiredep').stream;
var _ = require('lodash');
var browserSync = require('browser-sync');
gulp.task('inject-reload', ['inject'], function () {
browserSync.reload();
});
gulp.task('inject', ['scripts'], function () {
var injectStyles = gulp.src([
path.join(conf.paths.src, '/app/**/*.css')
], {read: false});
var injectScripts = gulp.src([
path.join(conf.paths.src, '/app/**/*.main.js'),
path.join(conf.paths.src, '/app/**/*.js'),
path.join('!' + conf.paths.src, '/app/dataTables/*.js'),
path.join('!' + conf.paths.src, '/app/**/bootstrap.js'),
path.join('!' + conf.paths.src, '/app/**/quick-sidebar.js'),
path.join('!' + conf.paths.src, '/app/**/app.js'),
path.join('!' + conf.paths.src, '/app/**/layout.js'),
path.join('!' + conf.paths.src, '/app/**/*.spec.js'),
path.join('!' + conf.paths.src, '/app/**/*.mock.js'),
path.join('!' + conf.paths.src, '/app/**/jstree.min.js'),
path.join('!' + conf.paths.src, '/app/**/ngJsTree.min.js'),
path.join('!' + conf.paths.src, '/app/**/ng-infinite-scroll.min.js'),
path.join('!' + conf.paths.src, '/app/**/bootstrap-switch.js')
])
.pipe($.angularFilesort()).on('error', conf.errorHandler('AngularFilesort'));
// var injectCustomScripts = gulp.src([
// path.join(conf.paths.src, '/app/js/app.js'),
// path.join(conf.paths.src, '/app/js/layout.js'),
// path.join(conf.paths.src, '/app/js/quick-sidebar.js')
// ]).pipe($.angularFilesort()).on('error', conf.errorHandler('AngularFilesort'));
var injectOptions = {
ignorePath: [conf.paths.src, path.join(conf.paths.tmp, '/serve')],
addRootSlash: false<|fim▁hole|> };
return gulp.src(path.join(conf.paths.src, '/*.html'))
.pipe($.inject(injectStyles, injectOptions))
.pipe($.inject(injectScripts, injectOptions))
.pipe(wiredep(_.extend({}, conf.wiredep)))
.pipe(gulp.dest(path.join(conf.paths.tmp, '/serve')));
});<|fim▁end|> | |
<|file_name|>upload.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
// MIT License. See license.txt
// parent, args, callback
frappe.upload = {
make: function(opts) {
if(!opts.args) opts.args = {};
if(opts.allow_multiple === undefined) {
opts.allow_multiple = 1
}
// whether to show public/private checkbox or not
opts.show_private = !("is_private" in opts);
// make private by default
if (!("options" in opts) || ("options" in opts &&
(opts.options && !opts.options.toLowerCase()=="public" && !opts.options.toLowerCase()=="image"))) {
opts.is_private = 1;
}
var d = null;
// create new dialog if no parent given
if(!opts.parent) {
d = new frappe.ui.Dialog({
title: __('Upload Attachment'),
primary_action_label: __('Attach'),
primary_action: function() {}
});
opts.parent = d.body;
opts.btn = d.get_primary_btn();
d.show();
}
var $upload = $(frappe.render_template("upload", {opts:opts})).appendTo(opts.parent);
var $file_input = $upload.find(".input-upload-file");
var $uploaded_files_wrapper = $upload.find('.uploaded-filename');
// bind pseudo browse button
$upload.find(".btn-browse").on("click",
function() { $file_input.click(); });
// dropzone upload
const $dropzone = $('<div style="padding: 20px 10px 0px 10px;"/>');
new frappe.ui.DropZone($dropzone, {
drop: function (files) {
$dropzone.hide();
opts.files = opts.files ? [...opts.files, ...files] : files;
$file_input.trigger('change');
}
});
// end dropzone
$upload.append($dropzone);
<|fim▁hole|> if (this.files.length > 0 || opts.files) {
var fileobjs = null;
if (opts.files) {
// files added programmatically
fileobjs = opts.files;
delete opts.files;
} else {
// files from input type file
fileobjs = $upload.find(":file").get(0).files;
}
var file_array = $.makeArray(fileobjs);
$upload.find(".web-link-wrapper").addClass("hidden");
$upload.find(".btn-browse").removeClass("btn-primary").addClass("btn-default");
$uploaded_files_wrapper.removeClass('hidden').empty();
$uploaded_files_wrapper.css({ 'margin-bottom': '25px' });
file_array = file_array.map(
file => Object.assign(file, {is_private: opts.is_private ? 1 : 0})
)
$upload.data('attached_files', file_array);
// List of files in a grid
$uploaded_files_wrapper.append(`
<div class="list-item list-item--head">
<div class="list-item__content list-item__content--flex-2">
${__('Filename')}
</div>
${opts.show_private
? `<div class="list-item__content file-public-column">
${__('Public')}
</div>`
: ''}
<div class="list-item__content list-item__content--activity" style="flex: 0 0 32px">
</div>
</div>
`);
var file_pills = file_array.map(
file => frappe.upload.make_file_row(file, opts)
);
$uploaded_files_wrapper.append(file_pills);
} else {
frappe.upload.show_empty_state($upload);
}
});
if(opts.files && opts.files.length > 0) {
$file_input.trigger('change');
}
// events
$uploaded_files_wrapper.on('click', '.list-item-container', function (e) {
var $item = $(this);
var filename = $item.attr('data-filename');
var attached_files = $upload.data('attached_files');
var $target = $(e.target);
if ($target.is(':checkbox')) {
var is_private = !$target.is(':checked');
attached_files = attached_files.map(file => {
if (file.name === filename) {
file.is_private = is_private ? 1 : 0;
}
return file;
});
$uploaded_files_wrapper
.find(`.list-item-container[data-filename="${filename}"] .fa.fa-fw`)
.toggleClass('fa-lock fa-unlock-alt');
$upload.data('attached_files', attached_files);
}
else if ($target.is('.uploaded-file-remove, .fa-remove')) {
// remove file from attached_files object
attached_files = attached_files.filter(file => file.name !== filename);
$upload.data('attached_files', attached_files);
// remove row from dom
$uploaded_files_wrapper
.find(`.list-item-container[data-filename="${filename}"]`)
.remove();
if(attached_files.length === 0) {
frappe.upload.show_empty_state($upload);
}
}
});
if(!opts.btn) {
opts.btn = $('<button class="btn btn-default btn-sm attach-btn">' + __("Attach")
+ '</div>').appendTo($upload);
} else {
$(opts.btn).unbind("click");
}
// Primary button handler
opts.btn.click(function() {
// close created dialog
d && d.hide();
// convert functions to values
if(opts.get_params) {
opts.args.params = opts.get_params();
}
// Get file url if input is visible
var file_url = $upload.find('[name="file_url"]:visible');
file_url = file_url.length && file_url.get(0).value;
if(opts.args.gs_template) {
frappe.integration_service.gsuite.create_gsuite_file(opts.args,opts);
} else if(file_url) {
opts.args.file_url = file_url;
frappe.upload.upload_file(null, opts.args, opts);
} else {
var files = $upload.data('attached_files');
frappe.upload.upload_multiple_files(files, opts.args, opts);
}
});
},
make_file_row: function(file, { show_private } = {}) {
var template = `
<div class="list-item-container" data-filename="${file.name}">
<div class="list-item">
<div class="list-item__content list-item__content--flex-2 ellipsis">
<span>${file.name}</span>
<span style="margin-top: 1px; margin-left: 5px;"
class="fa fa-fw text-warning ${file.is_private ? 'fa-lock': 'fa-unlock-alt'}">
</span>
</div>
${show_private?
`<div class="list-item__content file-public-column ellipsis">
<input type="checkbox" ${!file.is_private ? 'checked' : ''}/></div>`
: ''}
<div class="list-item__content list-item__content--activity ellipsis" style="flex: 0 0 32px;">
<button class="btn btn-default btn-xs text-muted uploaded-file-remove">
<span class="fa fa-remove"></span>
</button>
</div>
</div>
</div>`;
return $(template);
},
show_empty_state: function($upload) {
$upload.find(".uploaded-filename").addClass("hidden");
$upload.find(".web-link-wrapper").removeClass("hidden");
$upload.find(".private-file").addClass("hidden");
$upload.find(".btn-browse").removeClass("btn-default").addClass("btn-primary");
},
upload_multiple_files: function(files /*FileData array*/, args, opts) {
var i = -1;
frappe.upload.total_files = files ? files.length : 0;
// upload the first file
upload_next();
// subsequent files will be uploaded after
// upload_complete event is fired for the previous file
$(document).on('upload_complete', on_upload);
function upload_next() {
if(files) {
i += 1;
var file = files[i];
args.is_private = file.is_private;
if(!opts.progress) {
frappe.show_progress(__('Uploading'), i, files.length);
}
}
frappe.upload.upload_file(file, args, opts);
}
function on_upload(e, attachment) {
if (!files || i === files.length - 1) {
$(document).off('upload_complete', on_upload);
frappe.hide_progress();
return;
}
upload_next();
}
},
upload_file: function(fileobj, args, opts) {
if(!fileobj && !args.file_url) {
if(opts.on_no_attach) {
opts.on_no_attach();
} else {
frappe.msgprint(__("Please attach a file or set a URL"));
}
return;
}
if(fileobj) {
frappe.upload.read_file(fileobj, args, opts);
} else {
// with file_url
frappe.upload._upload_file(fileobj, args, opts);
}
},
_upload_file: function(fileobj, args, opts, dataurl) {
if (args.file_size) {
frappe.upload.validate_max_file_size(args.file_size);
}
if(opts.on_attach) {
opts.on_attach(args, dataurl)
} else {
if (opts.confirm_is_private) {
frappe.prompt({
label: __("Private"),
fieldname: "is_private",
fieldtype: "Check",
"default": 1
}, function(values) {
args["is_private"] = values.is_private;
frappe.upload.upload_to_server(fileobj, args, opts);
}, __("Private or Public?"));
} else {
if (!("is_private" in args) && "is_private" in opts) {
args["is_private"] = opts.is_private;
}
frappe.upload.upload_to_server(fileobj, args, opts);
}
}
},
read_file: function(fileobj, args, opts) {
args.filename = fileobj.name.split(' ').join('_');
args.file_url = null;
if(opts.options && opts.options.toLowerCase()=="image") {
if(!frappe.utils.is_image_file(args.filename)) {
frappe.msgprint(__("Only image extensions (.gif, .jpg, .jpeg, .tiff, .png, .svg) allowed"));
return;
}
}
let start_complete = frappe.cur_progress ? frappe.cur_progress.percent : 0;
var upload_with_filedata = function() {
let freader = new FileReader();
freader.onload = function() {
var dataurl = freader.result;
args.filedata = freader.result.split(",")[1];
args.file_size = fileobj.size;
frappe.upload._upload_file(fileobj, args, opts, dataurl);
};
freader.readAsDataURL(fileobj);
}
const file_not_big_enough = fileobj.size <= 24576;
if (opts.no_socketio || frappe.flags.no_socketio || file_not_big_enough) {
upload_with_filedata();
return;
}
frappe.socketio.uploader.start({
file: fileobj,
filename: args.filename,
is_private: args.is_private,
fallback: () => {
// if fails, use old filereader
upload_with_filedata();
},
callback: (data) => {
args.file_url = data.file_url;
frappe.upload._upload_file(fileobj, args, opts);
},
on_progress: (percent_complete) => {
let increment = (flt(percent_complete) / frappe.upload.total_files);
frappe.show_progress(__('Uploading'),
start_complete + increment);
}
});
},
upload_to_server: function(file, args, opts) {
if(opts.start) {
opts.start();
}
var ajax_args = {
"method": "uploadfile",
args: args,
callback: function(r) {
if(!r._server_messages) {
// msgbox.hide();
}
if(r.exc) {
// if no onerror, assume callback will handle errors
opts.onerror ? opts.onerror(r) : opts.callback(null, r);
frappe.hide_progress();
return;
}
var attachment = r.message;
opts.loopcallback && opts.loopcallback();
opts.callback && opts.callback(attachment, r);
$(document).trigger("upload_complete", attachment);
},
error: function(r) {
// if no onerror, assume callback will handle errors
opts.onerror ? opts.onerror(r) : opts.callback(null, null, r);
frappe.hide_progress();
return;
}
}
// copy handlers etc from opts
$.each(['queued', 'running', "progress", "always", "btn"], function(i, key) {
if(opts[key]) ajax_args[key] = opts[key];
});
return frappe.call(ajax_args);
},
get_string: function(dataURI) {
// remove filename
var parts = dataURI.split(',');
if(parts[0].indexOf(":")===-1) {
var a = parts[2];
} else {
var a = parts[1];
}
return decodeURIComponent(escape(atob(a)));
},
validate_max_file_size: function(file_size) {
var max_file_size = frappe.boot.max_file_size || 5242880;
if (file_size > max_file_size) {
// validate max file size
frappe.throw(__("File size exceeded the maximum allowed size of {0} MB", [max_file_size / 1048576]));
}
},
multifile_upload:function(fileobjs, args, opts={}) {
//loop through filenames and checkboxes then append to list
var fields = [];
for (var i =0,j = fileobjs.length;i<j;i++) {
var filename = fileobjs[i].name;
fields.push({'fieldname': 'label1', 'fieldtype': 'Heading', 'label': filename});
fields.push({'fieldname': filename+'_is_private', 'fieldtype': 'Check', 'label': 'Private', 'default': 1});
}
var d = new frappe.ui.Dialog({
'title': __('Make file(s) private or public?'),
'fields': fields,
primary_action: function(){
var i =0,j = fileobjs.length;
d.hide();
opts.loopcallback = function (){
if (i < j) {
args.is_private = d.fields_dict[fileobjs[i].name + "_is_private"].get_value();
frappe.upload.upload_file(fileobjs[i], args, opts);
i++;
}
};
opts.loopcallback();
}
});
d.show();
opts.confirm_is_private = 0;
},
create_gsuite_file: function(args, opts) {
return frappe.call({
type:'POST',
method: 'frappe.integrations.doctype.gsuite_templates.gsuite_templates.create_gsuite_doc',
args: args,
callback: function(r) {
var attachment = r.message;
opts.callback && opts.callback(attachment, r);
}
});
}
}<|fim▁end|> | $file_input.on("change", function() { |
<|file_name|>setfocus-directive.ts<|end_file_name|><|fim▁begin|>export class SetFocus {
static $depends: string[] = [];
static module: ng.IModule = angular.module(
'SetFocus', SetFocus.$depends
).directive('setFocus', ['$parse', SetFocus.directive]);
static directive($parse: ng.IParseService): angular.IDirective {
return {
restrict: 'A',
replace: false,
scope: false,
link: { post: function(
scope: ng.IScope,<|fim▁hole|> ) {
let element = <HTMLInputElement>$element[0];
let setFocus = $parse(attrs['setFocus']);
scope.$watch(setFocus, (newVal: any, oldVal: any) => {
if (!!newVal) {
element.focus();
}
});
}}
};
}
}<|fim▁end|> | $element: ng.IAugmentedJQuery,
attrs: ng.IAttributes |
<|file_name|>Wildfly8Redis32ModuleControllerTestIT.java<|end_file_name|><|fim▁begin|>/*
* LICENCE : CloudUnit is available under the Affero Gnu Public License GPL V3 : https://www.gnu.org/licenses/agpl-3.0.html
* but CloudUnit is licensed too under a standard commercial license.
* Please contact our sales team if you would like to discuss the specifics of our Enterprise license.
* If you are not sure whether the GPL is right for you,
* you can always test our software under the GPL and inspect the source code before you contact us
* about purchasing a commercial license.
*
* LEGAL TERMS : "CloudUnit" is a registered trademark of Treeptik and can't be used to endorse
* or promote products derived from this project without prior written permission from Treeptik.
* Products or services derived from this software may not be called "CloudUnit"
* nor may "Treeptik" or similar confusing terms appear in their names without prior written permission.
* For any questions, contact us : [email protected]
*/
package fr.treeptik.cloudunit.modules.redis;
import fr.treeptik.cloudunit.modules.AbstractModuleControllerTestIT;
/**
* Created by guillaume on 01/10/16.
*/
public class Wildfly8Redis32ModuleControllerTestIT extends AbstractModuleControllerTestIT {<|fim▁hole|> public Wildfly8Redis32ModuleControllerTestIT() {
super.server = "wildfly-8";
super.module = "redis-3-2";
super.numberPort = "6379";
super.managerPrefix = "";
super.managerSuffix = "";
super.managerPageContent = "";
}
@Override
protected void checkConnection(String forwardedPort) {
new CheckRedisConnection().invoke(forwardedPort);
}
}<|fim▁end|> | |
<|file_name|>PermissionRequired.java<|end_file_name|><|fim▁begin|>package org.apache.shiro.grails.annotations;
<|fim▁hole|>
import org.apache.shiro.authz.Permission;
@Target({ElementType.FIELD, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
public @interface PermissionRequired {
Class<? extends Permission> type();
/**
* The name of the role required to be granted this authorization.
*/
String target() default "*";
String actions() default "";
}<|fim▁end|> | import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target; |
<|file_name|>test_alloc_source.cpp<|end_file_name|><|fim▁begin|>#include "fixed/impl/basic_allocation_sources.hpp"
#include "fixed/impl/fixed_def.hpp"
#include "fixed/impl/fixed_type_traits.hpp"
#include "catch.hpp"
#include "test_struct.hpp"
TEST_CASE("test fixed allocation_source", "[alloc]")
{
static_assert(fixed::_impl::is_allocation_source<fixed::_impl::empty_source>::value, "empty source is supposed to be allocation_source");<|fim▁hole|> static_assert(!fixed::_impl::is_allocation_source<test>::value, "test is not supposed to be allocation_source");
static_assert(!fixed::_impl::is_allocation_source_v<test>, "test is not supposed to be allocation_source");
}<|fim▁end|> | static_assert(fixed::_impl::is_allocation_source_v<fixed::_impl::empty_source>, "empty source is supposed to be allocation_source");
|
<|file_name|>FileSystemNotFoundException.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing<|fim▁hole|> * permissions and limitations under the License.
*/
package com.amazonaws.services.elasticfilesystem.model;
import com.amazonaws.AmazonServiceException;
/**
* <p>
* Returned if the specified <code>FileSystemId</code> does not exist in the
* requester's AWS account.
* </p>
*/
public class FileSystemNotFoundException extends AmazonServiceException {
private static final long serialVersionUID = 1L;
private String errorCode;
/**
* Constructs a new FileSystemNotFoundException with the specified error
* message.
*
* @param message
* Describes the error encountered.
*/
public FileSystemNotFoundException(String message) {
super(message);
}
/**
* Sets the value of the ErrorCode property for this object.
*
* @param errorCode
* The new value for the ErrorCode property for this object.
*/
public void setErrorCode(String errorCode) {
this.errorCode = errorCode;
}
/**
* Returns the value of the ErrorCode property for this object.
*
* @return The value of the ErrorCode property for this object.
*/
public String getErrorCode() {
return this.errorCode;
}
/**
* Sets the value of the ErrorCode property for this object.
*
* @param errorCode
* The new value for the ErrorCode property for this object.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public FileSystemNotFoundException withErrorCode(String errorCode) {
setErrorCode(errorCode);
return this;
}
}<|fim▁end|> | |
<|file_name|>chapter.25.7.cpp<|end_file_name|><|fim▁begin|>//
// This is example code from Chapter 25.7 "Drill" of
// "Programming -- Principles and Practice Using C++" by Bjarne Stroustrup
//
#include <iostream>
using namespace std;
//------------------------------------------------------------------------------
int main()
{
// for int
{
int v = 1;
for (int i = 0; i<sizeof(v)*8; ++i)
{
cout << v << ' '; v <<=1;
}
}
// for unsigned int
{
unsigned int v = 1;
for (int i = 0; i<sizeof(v)*8; ++i)
{
cout << v << ' '; v <<=1;
}
}<|fim▁hole|>}
//------------------------------------------------------------------------------<|fim▁end|> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.