//------------------------------------------------------------------------------
// tri_simple: compute the number of triangles in a graph (simplest method)
//------------------------------------------------------------------------------
// A bare-bones version of the many variants in tri_template.c, with no
// parallelism, no log-time binary search, no use of tri_lohi to cut the work.
// This function is most similar to tri_mark (sequential version), defined
// in tri_template.c.
// Computes the sum(sum((A*A).*A)), in MATLAB notation, where A is binary
// (only the pattern is present). Or, in GraphBLAS notation,
// C<A> = A*A followed by reduce(C) to scalar.
#include "tri_def.h"
int64_t tri_simple // # of triangles, or -1 if out of memory
(
const int64_t *restrict Ap, // column pointers, size n+1
const Index *restrict Ai, // row indices
const Index n // A is n-by-n
)
{
bool *restrict Mark = (bool *) calloc (n, sizeof (bool)) ;
if (Mark == NULL) return (-1) ;
int64_t ntri = 0 ;
for (Index j = 0 ; j < n ; j++)
{
// scatter A(:,j) into Mark
for (int64_t p = Ap [j] ; p < Ap [j+1] ; p++)
{
Mark [Ai [p]] = 1 ;
}
// compute sum(C(:,j)) where C(:,j) = (A * A(:,j)) .* Mark
for (int64_t p = Ap [j] ; p < Ap [j+1] ; p++)
{
const Index k = Ai [p] ;
// C(:,j) += (A(:,k) * A(k,j)) .* Mark
for (int64_t pa = Ap [k] ; pa < Ap [k+1] ; pa++)
{
// C(i,j) += (A(i,k) * A(k,j)) .* Mark
ntri += Mark [Ai [pa]] ;
}
}
for (int64_t p = Ap [j] ; p < Ap [j+1] ; p++)
{
Mark [Ai [p]] = 0 ;
}
}
free (Mark) ;
return (ntri) ;
}
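//------------------------------------------------------------------------------
// usage sketch (not part of the original file): count the triangles of the
// 3-node triangle graph.  Assumes tri_def.h defines Index as a 32-bit integer
// type; adjust the declarations if it differs.  If A holds the full symmetric
// pattern, each triangle is counted 6 times; if only the lower-triangular
// part is stored (as below), each triangle is counted exactly once.
//------------------------------------------------------------------------------
#if 0
#include <stdio.h>
int main (void)
{
    // lower-triangular pattern of the triangle {0,1,2}:
    // column 0 holds rows {1,2}, column 1 holds row {2}, column 2 is empty
    int64_t Ap [4] = { 0, 2, 3, 3 } ;
    Index   Ai [3] = { 1, 2, 2 } ;
    printf ("triangles: %lld\n", (long long) tri_simple (Ap, Ai, 3)) ;  // 1
    return (0) ;
}
#endif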
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.ai.anomalydetector.models;
import com.azure.core.annotation.Fluent;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
/** The ChangePointDetectResponse model. */
@Fluent
public final class ChangePointDetectResponse {
/*
* Frequency extracted from the series, zero means no recurrent pattern has
* been found.
*/
@JsonProperty(value = "period", access = JsonProperty.Access.WRITE_ONLY)
private Integer period;
/*
* isChangePoint contains change point properties for each input point.
* True means an anomaly either negative or positive has been detected. The
* index of the array is consistent with the input series.
*/
@JsonProperty(value = "isChangePoint")
private List<Boolean> isChangePoint;
/*
* the change point confidence of each point
*/
@JsonProperty(value = "confidenceScores")
private List<Float> confidenceScores;
/**
* Get the period property: Frequency extracted from the series, zero means no recurrent pattern has been found.
*
* @return the period value.
*/
public Integer getPeriod() {
return this.period;
}
/**
* Get the isChangePoint property: isChangePoint contains change point properties for each input point. True means
* an anomaly either negative or positive has been detected. The index of the array is consistent with the input
* series.
*
* @return the isChangePoint value.
*/
public List<Boolean> getIsChangePoint() {
return this.isChangePoint;
}
/**
* Set the isChangePoint property: isChangePoint contains change point properties for each input point. True means
* an anomaly either negative or positive has been detected. The index of the array is consistent with the input
* series.
*
* @param isChangePoint the isChangePoint value to set.
* @return the ChangePointDetectResponse object itself.
*/
public ChangePointDetectResponse setIsChangePoint(List<Boolean> isChangePoint) {
this.isChangePoint = isChangePoint;
return this;
}
/**
* Get the confidenceScores property: the change point confidence of each point.
*
* @return the confidenceScores value.
*/
public List<Float> getConfidenceScores() {
return this.confidenceScores;
}
/**
* Set the confidenceScores property: the change point confidence of each point.
*
* @param confidenceScores the confidenceScores value to set.
* @return the ChangePointDetectResponse object itself.
*/
public ChangePointDetectResponse setConfidenceScores(List<Float> confidenceScores) {
this.confidenceScores = confidenceScores;
return this;
}
}
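// Usage sketch (illustrative, not part of the generated file). The fluent
// setters return the object itself, so configuration chains; "period" is
// read-only and is populated by the service when a response is deserialized.
//
//     ChangePointDetectResponse response = new ChangePointDetectResponse()
//         .setIsChangePoint(java.util.Arrays.asList(false, true, false))
//         .setConfidenceScores(java.util.Arrays.asList(0.1f, 0.9f, 0.2f));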
/*
* Copyright (c) 2012-2015 VMware, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, without
* warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.vmware.identity.rest.idm.server.test.integration.resources;
import static org.junit.Assert.assertEquals;
import java.util.Locale;
import javax.ws.rs.container.ContainerRequestContext;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import com.vmware.identity.rest.core.server.authorization.Config;
import com.vmware.identity.rest.core.server.exception.client.NotFoundException;
import com.vmware.identity.rest.core.server.exception.server.NotImplementedError;
import com.vmware.identity.rest.idm.data.SolutionUserDTO;
import com.vmware.identity.rest.idm.server.resources.SolutionUserResource;
import com.vmware.identity.rest.idm.server.test.annotation.IntegrationTest;
/**
* Integration tests for Solution user Resource
*
* @author <NAME>
* @author <NAME>
*/
@Category(IntegrationTest.class)
@Ignore // ignored due to IDM process to library change, see PR 1780279.
public class SolutionUserResourceIT extends TestBase {
private static final String SOLUTION_USERNAME = "testSolutionUser";
private static final String SOLUTIONUSER_UPN_UNKNOWN_USERNAME = "unknownSolutionUser" + "@" + DEFAULT_SYSTEM_DOMAIN;
private static final String SOLUTIONUSER_UPN_UNKNOWN_TENANT = SOLUTION_USERNAME + "@" + "unknown.local";
private SolutionUserResource solutionUserResource;
private ContainerRequestContext request;
@Before
public void testSetUp() {
request = EasyMock.createMock(ContainerRequestContext.class);
EasyMock.expect(request.getLanguage()).andReturn(Locale.getDefault()).anyTimes();
EasyMock.expect(request.getHeaderString(Config.CORRELATION_ID_HEADER)).andReturn("test").anyTimes();
EasyMock.replay(request);
solutionUserResource = new SolutionUserResource(DEFAULT_TENANT, request, null);
solutionUserResource.setIDMClient(idmClient);
}
@Test
public void testGetSolutionUser() throws Exception {
try {
// Test setup [Create solution user]
solutionUserHelper.createSolutionUser(DEFAULT_SYSTEM_DOMAIN, SOLUTION_USERNAME);
// Retrieve solution user
SolutionUserDTO solutionUser = solutionUserResource.get(SOLUTION_USERNAME);
assertEquals(SOLUTION_USERNAME, solutionUser.getName());
assertEquals(DEFAULT_SYSTEM_DOMAIN, solutionUser.getDomain());
} finally {
solutionUserHelper.deleteSolutionUser(DEFAULT_SYSTEM_DOMAIN, SOLUTION_USERNAME);
}
}
@Test(expected = NotFoundException.class)
public void testGetSolutionUser_WithNonExistentTenant_ThrowsNotFoundEx() {
solutionUserResource = new SolutionUserResource("unknown.local", request, null);
solutionUserResource.setIDMClient(idmClient);
solutionUserResource.get(SOLUTION_USERNAME);
}
@Test(expected = NotFoundException.class)
public void testGetSolutionUser_WithUnknownUser_ThrowsNotFoundEx() {
solutionUserResource.get(SOLUTIONUSER_UPN_UNKNOWN_USERNAME);
}
@Test(expected = NotImplementedError.class)
public void testGetGroupsOfSolutionuser_ThrowsNotImplementedError() {
solutionUserResource.getGroups(SOLUTION_USERNAME, 200);
}
}
{"nom":"Ozeville","circ":"1ère circonscription","dpt":"Manche","inscrits":107,"abs":44,"votants":63,"blancs":1,"nuls":3,"exp":59,"res":[{"nuance":"LR","nom":"<NAME>","voix":46},{"nuance":"REM","nom":"<NAME>","voix":13}]}
class Production(object):
def analyze(self, world):
"""Implement your analyzer here."""
def interpret(self, world):
"""Implement your interpreter here."""
class FuncCall(Production):
def __init__(self, token, params):
self.name = token[1]
self.params = params
self.token = token
def analyze(self, world):
self.params.analyze(world)
def interpret(self, world):
funcdef = world.functions[self.name]
funcdef.call(world, self.params)
def __repr__(self):
return f"FuncCall({self.name}: {self.params})"
class Parameters(Production):
def __init__(self, expressions):
self.expressions = expressions
def analyze(self, world):
for expr in self.expressions:
expr.analyze(world)
def interpret(self, world):
return [x.interpret(world) for x in self.expressions]
def __repr__(self):
return f"Parameters({self.expressions})"
class Expr(Production): pass
class NameExpr(Expr):
def __init__(self, token):
self.name = token[1]
self.token = token
def interpret(self, world):
# This should point at an IntExpr for now
ref = world.variables.get(self.name)
return ref.interpret(world)
def __repr__(self):
return f"NameExpr({self.name})"
class IntExpr(Expr):
def __init__(self, token):
self.integer = int(token[1])
self.token = token
def __repr__(self):
return f"IntExpr({self.integer})"
def interpret(self, world):
return self.integer
class AddExpr(Expr):
def __init__(self, left, right):
self.left = left
self.right = right
def analyze(self, world):
self.left.analyze(world)
self.right.analyze(world)
def interpret(self, world):
return self.left.interpret(world) + self.right.interpret(world)
def __repr__(self):
return f"AddExpr({self.left}, {self.right})"
class FuncDef(Production):
def __init__(self, token, params, body):
self.name = token[1]
self.params = params
self.body = body
self.token = token
def analyze(self, world):
world.functions[self.name] = self
def __repr__(self):
return f"FuncDef({self.name}({self.params}): {self.body}"
def call(self, world, params):
        params = params or Parameters([])
scope = world.clone()
for i, p in enumerate(self.params.expressions):
scope.variables[p.name] = params.expressions[i]
for line in self.body:
line.interpret(scope)
class PrintFuncDef(Production):
def call(self, world, params):
print(*params.interpret(world))
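# ------------------------------------------------------------------------------
# Usage sketch (not part of the original file). The surrounding project's
# "world" object is not shown here, so this assumes a minimal stand-in with
# `functions`, `variables`, and `clone()`; tokens are modeled as (type, value)
# pairs, matching the `token[1]` accesses above.
if __name__ == "__main__":
    class World:
        def __init__(self):
            self.functions = {}
            self.variables = {}

        def clone(self):
            child = World()
            child.functions = self.functions
            child.variables = dict(self.variables)
            return child

    world = World()
    world.functions["print"] = PrintFuncDef()
    # equivalent to the source line: print(3 + 4)
    call = FuncCall(("name", "print"), Parameters(
        [AddExpr(IntExpr(("int", "3")), IntExpr(("int", "4")))]))
    call.analyze(world)
    call.interpret(world)  # prints: 7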
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <fstream>
#include <string>
#include <vector>
#include <nlohmann/json.hpp>
#include "neural_network_from_json.hpp"
#if defined (TDS_ENABLE_OSQP) || defined (TDS_ENABLE_QPOASES)
#include "osqp_mpc_controller.hpp"
#endif
typedef double MyScalar;
typedef ::TINY::DoubleUtils MyTinyConstants;
typedef TinyAlgebra<double, MyTinyConstants> MyAlgebra;
using std::vector;
namespace tds {
namespace {
void ExtendVector(vector<MyScalar>& v1, const vector<MyScalar>& v2) {
v1.insert(v1.end(), v2.begin(), v2.end());
}
}
class TorqueStanceLegController {
public:
TorqueStanceLegController(SimpleRobot* robot, bool use_cpp_mpc) :
robot_(robot), use_cpp_mpc_(use_cpp_mpc) {
if (!use_cpp_mpc_) {
std::string nn_json_filename;
tds::FileUtils::find_file("mpc_ffn_model.json",
nn_json_filename);
std::ifstream weights_json_file(nn_json_filename);
nlohmann::json data;
weights_json_file >> data;
net_ = NeuralNetworkFromJson<
TinyAlgebra < double, TINY::DoubleUtils>>(data);
}
}
vector<MyScalar> GetAction(const vector<MyScalar>& desired_speed,
double desired_twisting_speed,
tds::SimpleRobot& robot,
const tds::COMVelocityEstimator&
com_velocity_estimator,
tds::OpenloopGaitGenerator&
openloop_gait_generator) {
vector<MyScalar> friction_coeffs = {0.45, 0.45, 0.45, 0.45};
vector<MyScalar> desired_com_position = {0.0, 0.0, 0.42};
vector<MyScalar> desired_com_velocity = {desired_speed[0],
desired_speed[1], 0.0};
vector<MyScalar> desired_com_roll_pitch_yaw = {0.0, 0.0, 0.0};
vector<MyScalar>
desired_com_angular_velocity = {0.0, 0.0, desired_twisting_speed};
vector<int> foot_contact_state;
for (tds::LegState state: openloop_gait_generator.GetDesiredLegState()) {
foot_contact_state.push_back(
state == tds::STANCE || state == tds::EARLY_CONTACT);
}
vector<MyScalar> mpc_input;
vector<MyScalar> com_vel = com_velocity_estimator.com_velocity_body_frame;
vector<MyScalar> com_roll_pitch_yaw = robot.GetBaseRollPitchYaw();
com_roll_pitch_yaw[2] = 0.0;
vector<MyScalar> com_angular_velocity = robot.GetBaseRollPitchYawRate();
vector<MyScalar> foot_positions_base_frame = robot
.GetFootPositionsInBaseFrame();
#if defined (TDS_ENABLE_OSQP) || defined (TDS_ENABLE_QPOASES)
if (use_cpp_mpc_) {
auto predicted_contact_forces = convex_mpc_.ComputeContactForces(
{0.0}, com_vel, com_roll_pitch_yaw, com_angular_velocity,
foot_contact_state, foot_positions_base_frame, friction_coeffs,
desired_com_position, desired_com_velocity,
desired_com_roll_pitch_yaw, desired_com_angular_velocity
);
vector<MyScalar> torque_output;
for (int leg_id = 0; leg_id < num_legs_; leg_id++) {
vector<MyScalar> leg_force = {
predicted_contact_forces[leg_id * 3],
predicted_contact_forces[leg_id * 3 + 1],
predicted_contact_forces[leg_id * 3 + 2]};
vector<MyScalar> motor_torques = robot_->MapContactForceToJointTorques(
leg_id, leg_force);
ExtendVector(torque_output, motor_torques);
}
return torque_output;
} else
#endif
{
ExtendVector(mpc_input, com_vel);
ExtendVector(mpc_input, com_roll_pitch_yaw);
ExtendVector(mpc_input, com_angular_velocity);
vector<MyScalar> foot_contact_state_double;
for (int state: foot_contact_state) {
foot_contact_state_double.push_back(double(state));
}
ExtendVector(mpc_input, foot_contact_state_double);
ExtendVector(mpc_input, foot_positions_base_frame);
ExtendVector(mpc_input, friction_coeffs);
ExtendVector(mpc_input, desired_com_position);
ExtendVector(mpc_input, desired_com_velocity);
ExtendVector(mpc_input, desired_com_roll_pitch_yaw);
ExtendVector(mpc_input, desired_com_angular_velocity);
vector<MyScalar> mpc_stance_torque_output;
net_.compute(mpc_input, mpc_stance_torque_output);
return mpc_stance_torque_output;
}
}
private:
bool use_cpp_mpc_;
SimpleRobot* robot_;
NeuralNetworkFromJson<MyAlgebra> net_;
int num_legs_ = 4;
#if defined (TDS_ENABLE_OSQP) || defined (TDS_ENABLE_QPOASES)
ConvexMpc convex_mpc_ = ConvexMpc(
/*mass=*/220.0 / 9.8,
/*inertia=*/{0.07335, 0, 0, 0, 0.25068, 0, 0, 0, 0.25447},
/*num_legs=*/num_legs_,
/*planning_horizon=*/10,
/*timestep=*/0.025,
/*qp_weights=*/{5, 5, 0.2, 0, 0, 10, 0., 0., 1., 1., 1., 0., 0}
);
#endif
};
} // namespace tds
{
"description": "<NAME>, <NAME>\nhttp://lanyrd.com/2015/writethedocs/sdmwxp/\nA software tester can be a tech writer\u2019s best friend, and vice versa. Jody (writer) and Arthur (tester) work together on APIs at Salesforce.com, and we\u2019ll talk about the tools and techniques we use to improve the quality of both our software and our documentation at the same time.\nWhile our APIs are still in development, we gather feedback internally. We established an API design review board to approve every API change -- this made a huge difference in ensuring that we offer a consistent, easy-to-consume programmatic interface to our users. We also conduct regular \u201cdogfooding\u201d sessions in which users are provided with draft documentation and asked to find both doc and product bugs.\nOur testers created an automated mechanism to alert us of any API changes in case anything slipped past the review board. It\u2019s proven to be invaluable for both doc and testing to keep up with the various teams who are building functionality into the API.\nOnce our APIs are publicly available, we take pride in listening to our users in help forums, on Twitter, and through pilot programs. We\u2019ve clarified our documentation and added test cases numerous times based on customer pain points.",
"language": "eng",
"recorded": "2015-05-18",
"related_urls": [
"http://lanyrd.com/2015/writethedocs/sdmwxp/"
],
"speakers": [
"<NAME>",
"<NAME>"
],
"thumbnail_url": "https://i.ytimg.com/vi/W5Un3r-vG_I/hqdefault.jpg",
"title": "Writer, Meet Tester",
"videos": [
{
"type": "youtube",
"url": "https://www.youtube.com/watch?v=W5Un3r-vG_I"
}
]
}
| 506 |
1,370 | <gh_stars>1000+
package ser.offheaplatency;
import java.io.*;
import java.util.*;
import org.nustaq.offheap.*;
import org.nustaq.serialization.simpleapi.*;
public final class MyFSTSerializer{
private final boolean toStore;
private final String fileName;
private final long memorySize;
private final FSTCoder fastCoder;
private final FSTLongOffheapMap<MktDataEvent> offHeapMap;
public MyFSTSerializer( boolean toStore, String location, String journalName, FSTCoder fastCoder, long memorySize, int count ) throws Exception{
this.toStore = toStore;
this.fileName = location + File.separator + journalName + ".mmf";
this.memorySize = memorySize;
this.fastCoder = fastCoder;
this.offHeapMap = new FSTLongOffheapMap<>( fileName, memorySize, count, fastCoder );
// this.offHeapMap = new FSTLongOffheapMap<>( memorySize, 2*count, fastCoder );
}
public final boolean toStore( ){
return toStore;
}
public final String getFilename( ){
return fileName;
}
public final void start( ){
fastCoder.getConf().setCrossPlatform( false );
fastCoder.getConf().setPreferSpeed( true );
fastCoder.getConf().setShareReferences( false );
fastCoder.getConf().registerClass( Long.class, MktDataEvent.class );
System.out.println("Journaling started at " + fileName + " with Memory " + memorySize ) ;
}
public final void storeEvent( MktDataEvent event ){
offHeapMap.put( event.getSequenceId(), event );
}
public final Collection<MktDataEvent> retrieveAllEvents( ){
Map<Long, MktDataEvent> retrievedMap = new LinkedHashMap<>();
for( Iterator<MktDataEvent> iterator = offHeapMap.values(); iterator.hasNext(); ){
MktDataEvent event = (MktDataEvent) iterator.next();
retrievedMap.put( event.getSequenceId(), event );
}
return retrievedMap.values();
}
public final void stop( ){
try{
offHeapMap.free( );
System.out.println("Stopped Journal and freed memory." );
}catch( Exception e ){
e.printStackTrace( );
}
}
}
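// Usage sketch (illustrative; MktDataEvent and a concrete FSTCoder such as
// org.nustaq.serialization.simpleapi.DefaultCoder come from the surrounding
// project -- the location, size, and count parameters below are made up):
//
//     MyFSTSerializer serializer = new MyFSTSerializer(
//             true, "/tmp", "mktdata", new DefaultCoder(), 1L << 30, 1_000_000);
//     serializer.start();
//     serializer.storeEvent(event);                 // journal one event off-heap
//     Collection<MktDataEvent> events = serializer.retrieveAllEvents();
//     serializer.stop();                            // free the mapped memory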
// networkit/cpp/centrality/GroupClosenessLocalSwaps.cpp
#include <algorithm>
#include <cassert>
#include <omp.h>
#include <queue>
#include <networkit/auxiliary/Random.hpp>
#include <networkit/centrality/GroupClosenessLocalSwaps.hpp>
namespace NetworKit {
GroupClosenessLocalSwaps::GroupClosenessLocalSwaps(const Graph &G, const std::vector<node> &group,
count maxSwaps)
: GroupClosenessLocalSwaps(G, group.begin(), group.end(), maxSwaps) {}
void GroupClosenessLocalSwaps::init() {
const auto n = G->upperNodeIdBound();
distance.assign(n, 0);
gamma.assign(n * group.size(), false);
visited.assign(n, false);
idxMap.clear();
idxMap.reserve(group.size());
stack.assign(n, 0);
farness.assign(group.size(), 0);
farnessDecrease.assign(group.size(), 0);
sumOfMins.assign(n, 0);
intDistributions.resize(omp_get_max_threads());
for (size_t i = 0; i < group.size(); ++i) {
const auto u = group[i];
idxMap[u] = i;
gamma[u * group.size() + i] = 1;
}
#ifdef __AVX2__
randVec.resize(n);
#else
randVec.resize(K * n);
#endif // __AVX2__
totalSwaps = 0;
hasRun = false;
}
void GroupClosenessLocalSwaps::run() {
init();
while (findAndSwap() && ++totalSwaps < maxSwaps)
; // Keep iterating.
hasRun = true;
}
bool GroupClosenessLocalSwaps::findAndSwap() {
bfsFromGroup();
// Among the vertices outside the group but neighbors of a vertex in the group, the one that
// minimizes the farness of the group.
const node v = estimateHighestDecrease();
const auto farnessDecreaseV = computeFarnessDecrease(v);
int64_t improvement = 0;
node u = none;
// Find the neighbor u of v s.t., u is in the group and, if swapped with v, maximizes the
// decrease of the farness of the group.
G->forNeighborsOf(v, [&](const node y) {
if (distance[y] == 0) {
const auto idx = idxMap.at(y);
const auto curImprovement = farnessDecreaseV - farness[idx] + farnessDecrease[idx];
if (curImprovement > improvement) {
improvement = curImprovement;
u = y;
}
}
});
if (improvement <= 0)
return false;
// Remove v from the group, add u.
const auto idxU = idxMap.at(u);
idxMap.erase(u);
idxMap[v] = idxU;
resetGamma(v, idxU);
return true;
}
void GroupClosenessLocalSwaps::bfsFromGroup() {
std::queue<node> q;
std::fill(visited.begin(), visited.end(), false);
stackSize = 0;
for (const auto &idx : idxMap) {
q.push(idx.first);
farness[idx.second] = 1;
distance[idx.first] = 0;
visited[idx.first] = true;
}
do {
const auto u = q.front();
q.pop();
        bool uIsLeaf = true;
G->forNeighborsOf(u, [&](const node v) {
// Whether v is in \Gamma_u i.e., the shortest path from S to v is realized only
// by u.
bool inGamma = true;
// Whether the node in the group that realizes the shortest distance to v has
// been found.
bool nearestNodeFound = false;
// Index of the node in the group that realizes the shortest distance to v.
index groupIdx = none;
if (!visited[v]) {
uIsLeaf = false;
distance[v] = distance[u] + 1;
visited[v] = true;
q.push(v);
for (size_t i = 0; i < group.size(); ++i) {
const auto curGamma = gamma[group.size() * u + i];
if (curGamma) {
if (!nearestNodeFound) {
nearestNodeFound = true;
groupIdx = i;
} else
inGamma = false;
}
gamma[group.size() * v + i] = curGamma;
}
if (inGamma)
++farness[groupIdx];
} else if (distance[u] + 1 == distance[v]) {
inGamma = true;
nearestNodeFound = false;
bool subtract = false;
for (size_t i = 0; i < group.size(); ++i) {
if (gamma[group.size() * v + i]) {
if (!nearestNodeFound) {
nearestNodeFound = true;
groupIdx = i;
} else {
inGamma = false;
break;
}
} else if (gamma[group.size() * u + i]) {
gamma[group.size() * v + i] = 1;
subtract = true;
}
}
if (inGamma && subtract)
--farness[groupIdx];
}
});
if (distance[u] != 0 && (!uIsLeaf || distance[u] == count{1}))
stack[stackSize++] = u;
} while (!q.empty());
}
int64_t GroupClosenessLocalSwaps::computeFarnessDecrease(node v) {
std::fill(visited.begin(), visited.end(), false);
std::queue<node> q;
q.push(v);
distance[v] = 0;
visited[v] = true;
int64_t decrease{1};
std::fill(farnessDecrease.begin(), farnessDecrease.end(), int64_t{0});
do {
const auto u = q.front();
q.pop();
bool inGamma = false;
        index groupIdx = none;
for (size_t i = 0; i < group.size(); ++i) {
if (gamma[group.size() * u + i]) {
if (!inGamma) {
inGamma = true;
groupIdx = i;
} else {
inGamma = false;
break;
}
}
}
if (inGamma)
++farnessDecrease[groupIdx];
G->forNeighborsOf(u, [&](const node v) {
if (visited[v])
return;
if (distance[u] + 1 <= distance[v]) {
if (distance[u] + 1 < distance[v]) {
distance[v] = distance[u] + 1;
++decrease;
}
q.push(v);
}
visited[v] = true;
});
} while (!q.empty());
return decrease;
}
void GroupClosenessLocalSwaps::initRandomVector() {
#pragma omp parallel
{
auto &urng = Aux::Random::getURNG();
#pragma omp for
for (omp_index i = 0; i < static_cast<omp_index>(G->upperNodeIdBound()); ++i) {
const node u = static_cast<node>(i);
if (!G->hasNode(u))
continue;
// Avoid to generate numbers for nodes in the group
if (distance[u] > 0) {
auto tid = omp_get_thread_num();
auto &distr = intDistributions[tid];
#ifdef __AVX2__
// Generating two 16-bit random integers per time
for (index j = 0; j < K; j += 2) {
const auto x = distr(urng);
randVec[u].items[j] = static_cast<uint16_t>(x);
randVec[u].items[j + 1] = static_cast<uint16_t>(x >> K);
}
randVec[u].vec = *(__m256i *)(&randVec[u].items[0]);
#else
// Generating two 16-bit random integers per time
for (index j = 0; j < K; j += 2) {
const auto x = distr(urng);
randVec[K * u + j] = static_cast<uint16_t>(x);
randVec[K * u + j + 1] = static_cast<uint16_t>(x >> K);
}
#endif // __AVX2__
}
}
}
}
node GroupClosenessLocalSwaps::estimateHighestDecrease() {
initRandomVector();
float bestEstimate = -1.f;
node v = none;
for (count i = 0; i < stackSize; ++i) {
const auto x = stack[stackSize - 1 - i];
#ifdef __AVX2__
// 16 randomly generated integers;
__m256i &x1 = randVec[x].vec;
// Pulling leaves
G->forNeighborsOf(x, [&](const node y) {
if (distance[y] == distance[x] + 1) {
const __m256i &y1 = randVec[y].vec;
x1 = _mm256_min_epu16(x1, y1);
}
});
*(__m256i *)(&randVec[x].items) = x1;
#else
// 16 random 16-bit integers are realized by 4 64-bit random integers
G->forNeighborsOf(x, [&](const node y) {
if (distance[y] == distance[x] + 1)
for (index i = 0; i < K; ++i)
randVec[K * x + i] = std::min(randVec[K * x + i], randVec[K * y + i]);
});
#endif // __AVX2__
if (distance[x] == 1) {
sumOfMins[x] = 0;
for (index j = 0; j < K; ++j) {
#ifdef __AVX2__
sumOfMins[x] += randVec[x].items[j];
#else
sumOfMins[x] += randVec[K * x + j];
#endif // __AVX2__
}
if (!sumOfMins[x])
sumOfMins[x] = 1;
}
}
G->forNodes([&](const node x) {
if (distance[x] == 1) {
float estimate =
static_cast<float>(K) / (static_cast<float>(sumOfMins[x]) / maxInt16) - 1.f;
if (estimate > bestEstimate) {
v = x;
bestEstimate = estimate;
}
}
});
assert(v != none);
return v;
}
std::vector<node> GroupClosenessLocalSwaps::groupMaxCloseness() const {
assureFinished();
std::vector<node> maxGroup;
maxGroup.reserve(group.size());
for (const auto &entry : idxMap)
maxGroup.push_back(entry.first);
return maxGroup;
}
count GroupClosenessLocalSwaps::numberOfSwaps() const {
assureFinished();
return totalSwaps;
}
void GroupClosenessLocalSwaps::resetGamma(node x, index idx) {
std::fill(gamma.begin() + group.size() * x, gamma.begin() + group.size() * (x + 1), false);
gamma[group.size() * x + idx] = true;
}
} // namespace NetworKit
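// Usage sketch (illustrative; assumes a NetworKit build and an undirected,
// unweighted graph G -- the group and swap budget below are made up):
//
//     NetworKit::Graph G = ...;
//     std::vector<NetworKit::node> group = {0, 1, 2};
//     NetworKit::GroupClosenessLocalSwaps algo(G, group, /*maxSwaps=*/100);
//     algo.run();
//     auto improvedGroup = algo.groupMaxCloseness();
//     auto swaps = algo.numberOfSwaps();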
{"nom":"Bartrès","circ":"2ème circonscription","dpt":"Hautes-Pyrénées","inscrits":378,"abs":191,"votants":187,"blancs":25,"nuls":3,"exp":159,"res":[{"nuance":"REM","nom":"<NAME>","voix":93},{"nuance":"RDG","nom":"<NAME>","voix":66}]}
{
"id" : 405,
"status" : "New",
"summary" : "enhancement for gpx creator",
"labels" : [ "Type-Defect", "Priority-Medium" ],
"stars" : 0,
"commentCount" : 1,
"comments" : [ {
"id" : 0,
"commenterId" : -2060118034196697627,
"content" : "hi,\r\ncould you please allow to create gpx file from classes that extend RecordedGeoPoint?\r\n\r\npublic static String create(final List<RecordedGeoPoint> someRecords)\r\n\r\nchange to\r\n\r\npublic static String create(final List<? extends RecordedGeoPoint> someRecords)\r\n\r\nin\r\n\r\npublic class RecordedRouteGPXFormatter implements OpenStreetMapContributorConstants\r\n",
"timestamp" : 1361289771,
"attachments" : [ ]
} ]
}
/**
* @file place_timing_update.h
* @brief Timing update routines used by the VPR placer.
*/
#pragma once
#include "timing_place.h"
#include "place_util.h"
///@brief Initialize the timing information and structures in the placer.
void initialize_timing_info(const PlaceCritParams& crit_params,
const PlaceDelayModel* delay_model,
PlacerCriticalities* criticalities,
PlacerSetupSlacks* setup_slacks,
ClusteredPinTimingInvalidator* pin_timing_invalidator,
SetupTimingInfo* timing_info,
t_placer_costs* costs);
///@brief Updates every timing related classes, variables and structures.
void perform_full_timing_update(const PlaceCritParams& crit_params,
const PlaceDelayModel* delay_model,
PlacerCriticalities* criticalities,
PlacerSetupSlacks* setup_slacks,
ClusteredPinTimingInvalidator* pin_timing_invalidator,
SetupTimingInfo* timing_info,
t_placer_costs* costs);
///@brief Update timing information based on the current block positions.
void update_timing_classes(const PlaceCritParams& crit_params,
SetupTimingInfo* timing_info,
PlacerCriticalities* criticalities,
PlacerSetupSlacks* setup_slacks,
ClusteredPinTimingInvalidator* pin_timing_invalidator);
///@brief Updates the timing driven (td) costs.
void update_timing_cost(const PlaceDelayModel* delay_model,
const PlacerCriticalities* criticalities,
double* timing_cost);
///@brief Incrementally updates timing cost based on the current delays and criticality estimates.
void update_td_costs(const PlaceDelayModel* delay_model, const PlacerCriticalities& place_crit, double* timing_cost);
///@brief Recomputes timing cost from scratch based on the current delays and criticality estimates.
void comp_td_costs(const PlaceDelayModel* delay_model, const PlacerCriticalities& place_crit, double* timing_cost);
/**
* @brief Commit all the setup slack values from the PlacerSetupSlacks
* class to `connection_setup_slack`.
*/
void commit_setup_slacks(const PlacerSetupSlacks* setup_slacks);
///@brief Verify that the values in `connection_setup_slack` matches PlacerSetupSlacks.
bool verify_connection_setup_slacks(const PlacerSetupSlacks* setup_slacks);
from roboticstoolbox.robot.Robot import Robot
from roboticstoolbox.robot.Link import Link
from roboticstoolbox.robot.DHRobot import SerialLink, DHRobot
from roboticstoolbox.robot.DHLink import (
DHLink,
RevoluteDH,
PrismaticDH,
RevoluteMDH,
PrismaticMDH,
)
from roboticstoolbox.robot.ERobot import ERobot, ERobot2
from roboticstoolbox.robot.ELink import ELink, ELink2
from roboticstoolbox.robot.ETS import ETS, ETS2
from roboticstoolbox.robot.Gripper import Gripper
from roboticstoolbox.robot.KinematicCache import KinematicCache
__all__ = [
"Robot",
"SerialLink",
"DHRobot",
"Link",
"DHLink",
"RevoluteDH",
"PrismaticDH",
"RevoluteMDH",
"PrismaticMDH",
"ERobot",
"ELink",
"ELink2",
"ERobot",
"ERobot2",
"ETS",
"ETS2",
"Gripper",
"KinematicCache",
]
# geotorch/exceptions.py
class VectorError(ValueError):
def __init__(self, name, size):
super().__init__(
"Cannot instantiate {} on a tensor of less than 2 dimensions. "
"Got a tensor of size {}".format(name, size)
)
class InverseError(ValueError):
def __init__(self, M):
super().__init__(
"Cannot initialize the parametrization {} as no inverse for the function "
"{} was specified in the constructor".format(M, M.f.__name__)
)
class NonSquareError(ValueError):
def __init__(self, name, size):
super().__init__(
"The {} parametrization can just be applied to square matrices. "
"Got a tensor of size {}".format(name, size)
)
class RankError(ValueError):
def __init__(self, n, k, rank):
super().__init__(
"The rank has to be 1 <= rank <= min({}, {}). Found {}".format(n, k, rank)
)
class InManifoldError(ValueError):
def __init__(self, X, M):
super().__init__("Tensor not contained in {}. Got\n{}".format(M, X))
// This file was GENERATED by command:
// pump.py callback.h.pump
// DO NOT EDIT BY HAND!!!
/*
* Copyright 2012 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// To generate callback.h from callback.h.pump, execute:
// /home/build/google3/third_party/gtest/scripts/pump.py callback.h.pump
// Callbacks are callable object containers. They can hold a function pointer
// or a function object and behave like a value type. Internally, data is
// reference-counted, making copies and pass-by-value inexpensive.
//
// Callbacks are typed using template arguments. The format is:
// CallbackN<ReturnType, ParamType1, ..., ParamTypeN>
// where N is the number of arguments supplied to the callable object.
// Callbacks are invoked using operator(), just like a function or a function
// object. Default-constructed callbacks are "empty," and executing an empty
// callback does nothing. A callback can be made empty by assigning it from
// a default-constructed callback.
//
// Callbacks are similar in purpose to std::function (which isn't available on
// all platforms we support) and a lightweight alternative to sigslots. Since
// they effectively hide the type of the object they call, they're useful in
// breaking dependencies between objects that need to interact with one another.
// Notably, they can hold the results of Bind(), std::bind*, etc, without
// needing
// to know the resulting object type of those calls.
//
// Sigslots, on the other hand, provide a fuller feature set, such as multiple
// subscriptions to a signal, optional thread-safety, and lifetime tracking of
// slots. When these features are needed, choose sigslots.
//
// Example:
// int sqr(int x) { return x * x; }
// struct AddK {
// int k;
// int operator()(int x) const { return x + k; }
// } add_k = {5};
//
// Callback1<int, int> my_callback;
// cout << my_callback.empty() << endl; // true
//
// my_callback = Callback1<int, int>(&sqr);
// cout << my_callback.empty() << endl; // false
// cout << my_callback(3) << endl; // 9
//
// my_callback = Callback1<int, int>(add_k);
// cout << my_callback(10) << endl; // 15
//
// my_callback = Callback1<int, int>();
// cout << my_callback.empty() << endl; // true
#ifndef WEBRTC_BASE_CALLBACK_H_
#define WEBRTC_BASE_CALLBACK_H_
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
namespace rtc {
template <class R>
class Callback0 {
public:
// Default copy operations are appropriate for this class.
Callback0() {}
template <class T> Callback0(const T& functor)
: helper_(new RefCountedObject< HelperImpl<T> >(functor)) {}
R operator()() {
if (empty())
return R();
return helper_->Run();
}
bool empty() const { return !helper_; }
private:
struct Helper : RefCountInterface {
virtual ~Helper() {}
virtual R Run() = 0;
};
template <class T> struct HelperImpl : Helper {
explicit HelperImpl(const T& functor) : functor_(functor) {}
virtual R Run() {
return functor_();
}
T functor_;
};
scoped_refptr<Helper> helper_;
};
template <class R,
class P1>
class Callback1 {
public:
// Default copy operations are appropriate for this class.
Callback1() {}
template <class T> Callback1(const T& functor)
: helper_(new RefCountedObject< HelperImpl<T> >(functor)) {}
R operator()(P1 p1) {
if (empty())
return R();
return helper_->Run(p1);
}
bool empty() const { return !helper_; }
private:
struct Helper : RefCountInterface {
virtual ~Helper() {}
virtual R Run(P1 p1) = 0;
};
template <class T> struct HelperImpl : Helper {
explicit HelperImpl(const T& functor) : functor_(functor) {}
virtual R Run(P1 p1) {
return functor_(p1);
}
T functor_;
};
scoped_refptr<Helper> helper_;
};
template <class R,
class P1,
class P2>
class Callback2 {
public:
// Default copy operations are appropriate for this class.
Callback2() {}
template <class T> Callback2(const T& functor)
: helper_(new RefCountedObject< HelperImpl<T> >(functor)) {}
R operator()(P1 p1, P2 p2) {
if (empty())
return R();
return helper_->Run(p1, p2);
}
bool empty() const { return !helper_; }
private:
struct Helper : RefCountInterface {
virtual ~Helper() {}
virtual R Run(P1 p1, P2 p2) = 0;
};
template <class T> struct HelperImpl : Helper {
explicit HelperImpl(const T& functor) : functor_(functor) {}
virtual R Run(P1 p1, P2 p2) {
return functor_(p1, p2);
}
T functor_;
};
scoped_refptr<Helper> helper_;
};
template <class R,
class P1,
class P2,
class P3>
class Callback3 {
public:
// Default copy operations are appropriate for this class.
Callback3() {}
template <class T> Callback3(const T& functor)
: helper_(new RefCountedObject< HelperImpl<T> >(functor)) {}
R operator()(P1 p1, P2 p2, P3 p3) {
if (empty())
return R();
return helper_->Run(p1, p2, p3);
}
bool empty() const { return !helper_; }
private:
struct Helper : RefCountInterface {
virtual ~Helper() {}
virtual R Run(P1 p1, P2 p2, P3 p3) = 0;
};
template <class T> struct HelperImpl : Helper {
explicit HelperImpl(const T& functor) : functor_(functor) {}
virtual R Run(P1 p1, P2 p2, P3 p3) {
return functor_(p1, p2, p3);
}
T functor_;
};
scoped_refptr<Helper> helper_;
};
template <class R,
class P1,
class P2,
class P3,
class P4>
class Callback4 {
public:
// Default copy operations are appropriate for this class.
Callback4() {}
template <class T> Callback4(const T& functor)
: helper_(new RefCountedObject< HelperImpl<T> >(functor)) {}
R operator()(P1 p1, P2 p2, P3 p3, P4 p4) {
if (empty())
return R();
return helper_->Run(p1, p2, p3, p4);
}
bool empty() const { return !helper_; }
private:
struct Helper : RefCountInterface {
virtual ~Helper() {}
virtual R Run(P1 p1, P2 p2, P3 p3, P4 p4) = 0;
};
template <class T> struct HelperImpl : Helper {
explicit HelperImpl(const T& functor) : functor_(functor) {}
virtual R Run(P1 p1, P2 p2, P3 p3, P4 p4) {
return functor_(p1, p2, p3, p4);
}
T functor_;
};
scoped_refptr<Helper> helper_;
};
template <class R,
class P1,
class P2,
class P3,
class P4,
class P5>
class Callback5 {
public:
// Default copy operations are appropriate for this class.
Callback5() {}
template <class T> Callback5(const T& functor)
: helper_(new RefCountedObject< HelperImpl<T> >(functor)) {}
R operator()(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) {
if (empty())
return R();
return helper_->Run(p1, p2, p3, p4, p5);
}
bool empty() const { return !helper_; }
private:
struct Helper : RefCountInterface {
virtual ~Helper() {}
virtual R Run(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) = 0;
};
template <class T> struct HelperImpl : Helper {
explicit HelperImpl(const T& functor) : functor_(functor) {}
virtual R Run(P1 p1, P2 p2, P3 p3, P4 p4, P5 p5) {
return functor_(p1, p2, p3, p4, p5);
}
T functor_;
};
scoped_refptr<Helper> helper_;
};
} // namespace rtc
#endif // WEBRTC_BASE_CALLBACK_H_
package com.example.liuyongkui.tvdemo;
import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
/**
* Created by LIUYONGKUI726 on 2016-03-22.
*/
public class MyTestView extends TextView {
private final static String Tag = "MyTestView";
public MyTestView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
}
public MyTestView(Context context) {
super(context);
}
public MyTestView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public MyTestView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
Log.d(Tag, "onMeasure()");
Toast.makeText(getContext(), "onMeasure", Toast.LENGTH_LONG).show();
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
super.onLayout(changed, left, top, right, bottom);
Log.d(Tag, "onLayout()");
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
Log.d(Tag, "onDraw");
}
}
#pragma once
#include "narray/image_batch.h"
#include "narray/convolution_info.h"
namespace minerva {
class Convolution {
public:
static ImageBatch ConvForward(ImageBatch src, Filter filter, NArray bias, ConvInfo info);
static ImageBatch ConvBackwardData(ImageBatch diff, ImageBatch bottom, Filter filter, ConvInfo info);
static Filter ConvBackwardFilter(ImageBatch diff, ImageBatch bottom, Filter filter, ConvInfo info);
static NArray ConvBackwardBias(ImageBatch diff);
static ImageBatch SoftmaxForward(ImageBatch src, SoftmaxAlgorithm algorithm);
static ImageBatch SoftmaxBackward(ImageBatch diff, ImageBatch top, SoftmaxAlgorithm algorithm);
static ImageBatch ActivationForward(ImageBatch src, ActivationAlgorithm algorithm);
static ImageBatch ActivationBackward(ImageBatch diff, ImageBatch top, ImageBatch bottom, ActivationAlgorithm algorithm);
static ImageBatch PoolingForward(ImageBatch src, PoolingInfo info);
static ImageBatch PoolingBackward(ImageBatch diff, ImageBatch top, ImageBatch bottom, PoolingInfo info);
static ImageBatch LRNForward(ImageBatch src, ImageBatch scale, int local_size, float alpha, float beta);
static ImageBatch LRNBackward(ImageBatch bottom_data, ImageBatch top_data, ImageBatch scale, ImageBatch top_diff , int local_size, float alpha, float beta);
};
} // namespace minerva
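// Usage sketch (illustrative; the batch, filter, bias, and info objects come
// from the rest of minerva's narray API -- only the signatures declared above
// are assumed):
//
//     ImageBatch top = Convolution::ConvForward(bottom, filter, bias, conv_info);
//     ImageBatch pooled = Convolution::PoolingForward(top, pooling_info);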
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.indexinglanguage.expressions;
import com.yahoo.document.DataType;
import com.yahoo.document.Field;
import com.yahoo.document.StructDataType;
import com.yahoo.document.datatypes.Array;
import com.yahoo.document.datatypes.FieldValue;
import com.yahoo.document.datatypes.IntegerFieldValue;
import com.yahoo.document.datatypes.StringFieldValue;
import com.yahoo.document.datatypes.Struct;
import com.yahoo.document.datatypes.WeightedSet;
import com.yahoo.vespa.indexinglanguage.SimpleTestAdapter;
import org.junit.Test;
import java.util.LinkedList;
import java.util.List;
import static com.yahoo.vespa.indexinglanguage.expressions.ExpressionAssert.assertVerify;
import static com.yahoo.vespa.indexinglanguage.expressions.ExpressionAssert.assertVerifyThrows;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* @author <NAME>
*/
@SuppressWarnings({ "rawtypes" })
public class ForEachTestCase {
@Test
public void requireThatAccessorsWork() {
Expression innerExp = new AttributeExpression("foo");
ForEachExpression exp = new ForEachExpression(innerExp);
assertSame(innerExp, exp.getInnerExpression());
}
@Test
public void requireThatHashCodeAndEqualsAreImplemented() {
Expression innerExp = new AttributeExpression("foo");
Expression exp = new ForEachExpression(innerExp);
assertFalse(exp.equals(new Object()));
assertFalse(exp.equals(new ForEachExpression(new AttributeExpression("bar"))));
assertEquals(exp, new ForEachExpression(innerExp));
assertEquals(exp.hashCode(), new ForEachExpression(innerExp).hashCode());
}
@Test
public void requireThatExpressionCanBeVerified() {
Expression exp = new ForEachExpression(SimpleExpression.newConversion(DataType.INT, DataType.STRING));
assertVerify(DataType.getArray(DataType.INT), exp, DataType.getArray(DataType.STRING));
assertVerifyThrows(null, exp, "Expected any input, got null.");
assertVerifyThrows(DataType.INT, exp, "Expected Array, Struct or WeightedSet input, got int.");
assertVerifyThrows(DataType.getArray(DataType.STRING), exp, "Expected int input, got string.");
}
@Test
public void requireThatStructFieldCompatibilityIsVerified() {
StructDataType type = new StructDataType("my_struct");
type.addField(new Field("foo", DataType.INT));
assertVerify(type, new ForEachExpression(new SimpleExpression()), type);
assertVerifyThrows(type, new ForEachExpression(SimpleExpression.newConversion(DataType.STRING, DataType.INT)),
"Expected string input, got int.");
assertVerifyThrows(type, new ForEachExpression(SimpleExpression.newConversion(DataType.INT, DataType.STRING)),
"Expected int output, got string.");
}
@Test
public void requireThatEachTokenIsExecutedSeparately() {
ExecutionContext ctx = new ExecutionContext(new SimpleTestAdapter());
Array<StringFieldValue> arr = new Array<>(DataType.getArray(DataType.STRING));
arr.add(new StringFieldValue("6"));
arr.add(new StringFieldValue("9"));
ctx.setValue(arr);
MyCollector exp = new MyCollector();
new ForEachExpression(exp).execute(ctx);
assertEquals(2, exp.lst.size());
FieldValue val = exp.lst.get(0);
assertTrue(val instanceof StringFieldValue);
assertEquals("6", ((StringFieldValue)val).getString());
val = exp.lst.get(1);
assertTrue(val instanceof StringFieldValue);
assertEquals("9", ((StringFieldValue)val).getString());
}
@Test
public void requireThatCreatedOutputTypeDependsOnInnerExpression() {
assertNull(new ForEachExpression(new SimpleExpression()).createdOutputType());
assertNotNull(new ForEachExpression(new SetValueExpression(new IntegerFieldValue(69))).createdOutputType());
}
@Test
public void requireThatArrayCanBeConverted() {
ExecutionContext ctx = new ExecutionContext(new SimpleTestAdapter());
Array<StringFieldValue> before = new Array<>(DataType.getArray(DataType.STRING));
before.add(new StringFieldValue("6"));
before.add(new StringFieldValue("9"));
ctx.setValue(before);
new ForEachExpression(new ToIntegerExpression()).execute(ctx);
FieldValue val = ctx.getValue();
assertTrue(val instanceof Array);
Array after = (Array)val;
assertEquals(2, after.size());
assertEquals(new IntegerFieldValue(6), after.get(0));
assertEquals(new IntegerFieldValue(9), after.get(1));
}
@Test
public void requireThatEmptyArrayCanBeConverted() {
ExecutionContext ctx = new ExecutionContext(new SimpleTestAdapter());
ctx.setValue(new Array<StringFieldValue>(DataType.getArray(DataType.STRING)));
new ForEachExpression(new ToIntegerExpression()).execute(ctx);
FieldValue val = ctx.getValue();
assertTrue(val instanceof Array);
assertEquals(DataType.INT, ((Array)val).getDataType().getNestedType());
assertTrue(((Array)val).isEmpty());
}
@Test
public void requireThatIllegalInputValueThrows() {
try {
new ForEachExpression(new SimpleExpression()).execute(new StringFieldValue("foo"));
fail();
} catch (IllegalArgumentException e) {
assertEquals("Expected Array, Struct or WeightedSet input, got string.", e.getMessage());
}
}
@Test
public void requireThatArrayWithNullCanBeConverted() {
ExecutionContext ctx = new ExecutionContext(new SimpleTestAdapter());
Array<StringFieldValue> arr = new Array<>(DataType.getArray(DataType.STRING));
arr.add(new StringFieldValue("foo"));
ctx.setValue(arr);
new ForEachExpression(SimpleExpression.newConversion(DataType.STRING, DataType.INT)
.setExecuteValue(null)).execute(ctx);
FieldValue val = ctx.getValue();
assertTrue(val instanceof Array);
assertEquals(DataType.INT, ((Array)val).getDataType().getNestedType());
assertTrue(((Array)val).isEmpty());
}
@Test
public void requireThatWsetCanBeConverted() {
ExecutionContext ctx = new ExecutionContext(new SimpleTestAdapter());
WeightedSet<StringFieldValue> before = new WeightedSet<>(DataType.getWeightedSet(DataType.STRING));
before.put(new StringFieldValue("6"), 9);
before.put(new StringFieldValue("9"), 6);
ctx.setValue(before);
new ForEachExpression(new ToIntegerExpression()).execute(ctx);
FieldValue val = ctx.getValue();
assertTrue(val instanceof WeightedSet);
WeightedSet after = (WeightedSet)val;
assertEquals(2, after.size());
assertEquals(Integer.valueOf(9), after.get(new IntegerFieldValue(6)));
assertEquals(Integer.valueOf(6), after.get(new IntegerFieldValue(9)));
}
@Test
public void requireThatEmptyWsetCanBeConverted() {
ExecutionContext ctx = new ExecutionContext(new SimpleTestAdapter());
ctx.setValue(new WeightedSet<StringFieldValue>(DataType.getWeightedSet(DataType.STRING)));
new ForEachExpression(new ToIntegerExpression()).execute(ctx);
FieldValue val = ctx.getValue();
assertTrue(val instanceof WeightedSet);
assertEquals(DataType.INT, ((WeightedSet)val).getDataType().getNestedType());
assertTrue(((WeightedSet)val).isEmpty());
}
@Test
public void requireThatStructContentCanBeConverted() {
StructDataType type = new StructDataType("my_type");
type.addField(new Field("my_str", DataType.STRING));
Struct struct = new Struct(type);
struct.setFieldValue("my_str", new StringFieldValue(" foo "));
ExecutionContext ctx = new ExecutionContext(new SimpleTestAdapter());
ctx.setValue(struct);
new ForEachExpression(new TrimExpression()).execute(ctx);
FieldValue val = ctx.getValue();
assertTrue(val instanceof Struct);
assertEquals(type, val.getDataType());
assertEquals(new StringFieldValue("foo"), ((Struct)val).getFieldValue("my_str"));
}
@Test
public void requireThatIncompatibleStructFieldsFailToValidate() {
StructDataType type = new StructDataType("my_type");
type.addField(new Field("my_int", DataType.INT));
VerificationContext ctx = new VerificationContext(new SimpleTestAdapter());
ctx.setValueType(type);
try {
new ForEachExpression(new ToArrayExpression()).verify(ctx);
fail();
} catch (VerificationException e) {
assertEquals("Expected int output, got Array<int>.", e.getMessage());
}
}
@Test
public void requireThatIncompatibleStructFieldsFailToExecute() {
StructDataType type = new StructDataType("my_type");
type.addField(new Field("my_int", DataType.INT));
Struct struct = new Struct(type);
struct.setFieldValue("my_int", new IntegerFieldValue(69));
ExecutionContext ctx = new ExecutionContext(new SimpleTestAdapter());
ctx.setValue(struct);
try {
new ForEachExpression(new ToArrayExpression()).execute(ctx);
fail();
} catch (IllegalArgumentException e) {
assertEquals("Class class com.yahoo.document.datatypes.Array not applicable to an class " +
"com.yahoo.document.datatypes.IntegerFieldValue instance.", e.getMessage());
}
}
private static class MyCollector extends Expression {
List<FieldValue> lst = new LinkedList<>();
MyCollector() {
super(null);
}
@Override
protected void doExecute(ExecutionContext context) {
lst.add(context.getValue());
}
@Override
protected void doVerify(VerificationContext context) {
}
@Override
public DataType createdOutputType() {
return null;
}
}
}
import os
from bauh.api.constants import CACHE_PATH, CONFIG_PATH
from bauh.commons import resource
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
SNAP_CACHE_PATH = CACHE_PATH + '/snap'
CONFIG_FILE = '{}/snap.yml'.format(CONFIG_PATH)
CATEGORIES_FILE_PATH = SNAP_CACHE_PATH + '/categories.txt'
URL_CATEGORIES_FILE = 'https://raw.githubusercontent.com/vinifmor/bauh-files/master/snap/categories.txt'
SUGGESTIONS_FILE = 'https://raw.githubusercontent.com/vinifmor/bauh-files/master/snap/suggestions.txt'
def get_icon_path() -> str:
return resource.get_path('img/snap.svg', ROOT_DIR)
"""
Archive tools for wheel.
"""
import logging
import os.path
import zipfile
log = logging.getLogger("wheel")
def archive_wheelfile(base_name, base_dir):
'''Archive all files under `base_dir` in a whl file and name it like
`base_name`.
'''
olddir = os.path.abspath(os.curdir)
base_name = os.path.abspath(base_name)
try:
os.chdir(base_dir)
return make_wheelfile_inner(base_name)
finally:
os.chdir(olddir)
def make_wheelfile_inner(base_name, base_dir='.'):
"""Create a whl file from all the files under 'base_dir'.
Places .dist-info at the end of the archive."""
zip_filename = base_name + ".whl"
log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
# XXX support bz2, xz when available
zip = zipfile.ZipFile(open(zip_filename, "wb+"), "w",
compression=zipfile.ZIP_DEFLATED)
score = {'WHEEL': 1, 'METADATA': 2, 'RECORD': 3}
deferred = []
def writefile(path):
zip.write(path, path)
log.info("adding '%s'" % path)
for dirpath, dirnames, filenames in os.walk(base_dir):
for name in filenames:
path = os.path.normpath(os.path.join(dirpath, name))
if os.path.isfile(path):
if dirpath.endswith('.dist-info'):
deferred.append((score.get(name, 0), path))
else:
writefile(path)
deferred.sort()
for score, path in deferred:
writefile(path)
zip.close()
return zip_filename
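# Usage sketch (illustrative; paths are made up): packs everything under the
# staging directory into dist/mypkg-1.0-py3-none-any.whl, writing the
# .dist-info entries last so the metadata sits at the end of the archive.
#
#     archive_wheelfile("dist/mypkg-1.0-py3-none-any", "build/bdist/wheel")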
#ifndef PYTHONIC_NUMPY_AROUND_HPP
#define PYTHONIC_NUMPY_AROUND_HPP
#include "pythonic/include/numpy/around.hpp"
#include "pythonic/numpy/rint.hpp"
#include "pythonic/numpy/power.hpp"
#include "pythonic/numpy/asarray.hpp"
#include "pythonic/numpy/floor_divide.hpp"
#include "pythonic/numpy/float64.hpp"
#include "pythonic/numpy/multiply.hpp"
PYTHONIC_NS_BEGIN
namespace numpy
{
// fast path
template <class E>
auto around(E &&a) -> decltype(functor::rint{}(std::forward<E>(a)))
{
return functor::rint{}(std::forward<E>(a));
}
// generic floating point version, pure numpy_expr
template <class E>
auto around(E &&a, long decimals) -> typename std::enable_if<
!std::is_integral<
typename types::dtype_of<typename std::decay<E>::type>::type>::value,
decltype(functor::rint{}(functor::multiply{}(
std::forward<E>(a),
std::declval<typename types::dtype_of<
typename std::decay<E>::type>::type>())) /
std::declval<typename types::dtype_of<
typename std::decay<E>::type>::type>())>::type
{
typename types::dtype_of<typename std::decay<E>::type>::type const fact =
functor::power{}(10., decimals);
return functor::rint{}(functor::multiply{}(std::forward<E>(a), fact)) /
fact;
}
// the integer version is only relevant when decimals < 0
template <class E>
auto around(E &&a, long decimals) -> typename std::enable_if<
std::is_integral<
typename types::dtype_of<typename std::decay<E>::type>::type>::value,
decltype(numpy::functor::floor_divide{}(
functor::float64{}(std::forward<E>(a)),
std::declval<typename types::dtype_of<
typename std::decay<E>::type>::type>()) *
std::declval<typename types::dtype_of<
typename std::decay<E>::type>::type>())>::type
{
typename types::dtype_of<typename std::decay<E>::type>::type const fact =
functor::power{}(10L, std::max(0L, -decimals));
return pythonic::numpy::functor::floor_divide{}(
functor::float64{}(std::forward<E>(a)), fact) *
fact;
}
}
PYTHONIC_NS_END
#endif
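// Usage sketch (illustrative; Pythran normally instantiates these overloads
// itself when compiling numpy.around calls):
//
//     auto r0 = pythonic::numpy::around(arr);      // nearest integer, fast path
//     auto r2 = pythonic::numpy::around(arr, 2);   // two decimal places
//     auto rt = pythonic::numpy::around(iarr, -1); // integer input, round to tens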
/* Copyright (c) 2001, Stanford University
* All rights reserved
*
* See the file LICENSE.txt for information on redistributing this software.
*/
#include "chromium.h"
#include "cr_error.h"
#include "cr_mem.h"
#include "server_dispatch.h"
#include "server.h"
void SERVER_DISPATCH_APIENTRY
crServerDispatchGenQueriesARB(GLsizei n, GLuint *queries)
{
GLuint *local_queries;
(void) queries;
if (n <= 0 || n >= INT32_MAX / sizeof(GLuint))
{
crError("crServerDispatchGenQueriesARB: parameter 'n' is out of range");
return;
}
local_queries = (GLuint *)crCalloc(n * sizeof(*local_queries));
if (!local_queries)
{
crError("crServerDispatchGenQueriesARB: out of memory");
return;
}
cr_server.head_spu->dispatch_table.GenQueriesARB( n, local_queries );
crServerReturnValue( local_queries, n * sizeof(*local_queries) );
crFree( local_queries );
}
//
// SwiftyDrop.h
// SwiftyDrop
//
// Created by MORITANAOKI on 2015/06/21.
// Copyright (c) 2015年 MORITANAOKI. All rights reserved.
//
#import <UIKit/UIKit.h>
//! Project version number for SwiftyDrop.
FOUNDATION_EXPORT double SwiftyDropVersionNumber;
//! Project version string for SwiftyDrop.
FOUNDATION_EXPORT const unsigned char SwiftyDropVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <SwiftyDrop/PublicHeader.h>
import numpy as np
from numpy.testing import assert_array_equal, assert_equal
from nose.tools import assert_raises
from pystruct.models import ChainCRF
def test_initialize():
rnd = np.random.RandomState(0)
x = rnd.normal(size=(13, 5))
y = rnd.randint(3, size=13)
crf = ChainCRF(n_states=3, n_features=5)
# no-op
crf.initialize([x], [y])
    # test initialization works
crf = ChainCRF()
crf.initialize([x], [y])
assert_equal(crf.n_states, 3)
assert_equal(crf.n_features, 5)
crf = ChainCRF(n_states=2)
assert_raises(ValueError, crf.initialize, X=[x], Y=[y])
def test_directed_chain():
    # check that a directed model actually works differently in the two
# directions. chain of length three, three states 0, 1, 2 which want to be
# in this order, evidence only in the middle
x = np.array([[0, 0, 0], [0, 1, 0], [0, 0, 0]])
w = np.array([1, 0, 0, # unary
0, 1, 0,
0, 0, 1,
0, 1, 0, # pairwise
0, 0, 1,
0, 0, 0])
crf = ChainCRF(n_states=3, n_features=3)
y = crf.inference(x, w)
assert_array_equal([0, 1, 2], y)
// infer/tests/codetoanalyze/java/biabduction/DoubleExample.java
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package codetoanalyze.java.infer;
import javax.annotation.Nullable;
public class DoubleExample {
@Nullable Double x;
private Double testAssignNonNullOk() {
x = 1.0;
return x + 1.0;
}
private Double testdReadNullableBad() {
return x + 1.0;
}
}
package j2html.tags.attributes;
import j2html.tags.IInstance;
import j2html.tags.Tag;
public interface IHreflang<T extends Tag<T>> extends IInstance<T> {
default T withHreflang(final String hreflang_) {
return self().attr("hreflang", hreflang_);
}
default T withCondHreflang(final boolean enable, final String hreflang_) {
if (enable) {
self().attr("hreflang", hreflang_);
}
return self();
}
}
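// Usage sketch (illustrative): any tag whose class implements this interface
// gains the fluent hreflang setters, e.g. with j2html's TagCreator:
//
//     a("Docs").withHref("/docs").withHreflang("en")
//     link().withCondHreflang(isLocalized, locale)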
/*
GRT MIT License
Copyright (c) <2012> <<NAME>, Media Lab, MIT>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
and associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/*
GRT DeadZone Example
This example demonstrates how to create and use the GRT DeadZone PreProcessing Module.
The DeadZone module sets any values in the input signal that fall within the dead-zone region to zero.
Any values outside of the dead-zone region will be offset by the dead zone's lower limit and upper limit.
 In this example we create a DeadZone with a dead-zone region of [-0.2 0.2] for a 1-dimensional signal; we
 then generate some dummy data and run this data through the DeadZone. The dummy signal and the dead-zone
 filtered signal are then printed to std::cout.
This example shows you how to:
- Create a new DeadZone instance
- Filter some dummy data using the DeadZone
- Save the DeadZone instances' settings to a file
- Load the DeadZone instances' settings from a file
*/
//You might need to set the specific path of the GRT header relative to your project
#include <GRT/GRT.h>
using namespace GRT;
using namespace std;
int main (int argc, const char * argv[])
{
//Create a new DeadZone instance
    //Set the lower limit to -0.2, and the upper limit to 0.2
//Set the dimensionality of the input signal to 1
DeadZone deadZone(-0.2,0.2,1);
//Create some dummy data and filter it using the dead zone
Random random;
UINT M = 1000;
UINT signalCounter = 0;
for(UINT i=0; i<M; i++){
//Generate the signal
double signal = 0;
if( signalCounter < 50 ){
signal = random.getRandomNumberUniform(-0.15,0.15);
}else{
if( signalCounter >= 50 && signalCounter < 100 ){
signal = 1.0;
}else{
signal = -1.0;
}
}
if( ++signalCounter >= 150 ) signalCounter = 0;
//Filter the signal using the dead zone
double filteredSignal = deadZone.filter( signal );
cout << signal << "\t" << filteredSignal << endl;
}
//If we need to save the deadzone settings to a file then we can
deadZone.save("DeadZoneSettings.grt");
//We can then load the settings later if needed
deadZone.load("DeadZoneSettings.grt");
return EXIT_SUCCESS;
}
| 1,089 |
14,499 | <gh_stars>1000+
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package android.database;
import java.io.IOException;
public class CursorWrapper implements Cursor {
protected final Cursor mCursor;
public CursorWrapper(Cursor cursor) {
mCursor = cursor;
}
public void close() {
try {
mCursor.close();
        } catch (IOException e) {
            // Intentionally ignored: this model swallows close() failures.
        }
}
}
| 168 |
3,587 | /// Copyright 2021 Google Inc. All rights reserved.
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
#import <DiskArbitration/DiskArbitration.h>
#import <Foundation/Foundation.h>
#include <EndpointSecurity/EndpointSecurity.h>
/*
* Manages DiskArbitration and EndpointSecurity to monitor/block/remount USB
* storage devices.
*/
@interface SNTDeviceManager : NSObject
@property(nonatomic, readwrite) BOOL subscribed;
@property(nonatomic, readwrite) BOOL blockUSBMount;
@property(nonatomic, readwrite) NSArray<NSString *> *remountArgs;
- (instancetype)init;
- (void)listen;
- (BOOL)subscribed;
@end
| 336 |
14,525 | <reponame>leonarduschen/zipline
from zipline.utils.calendars import get_calendar
class ExchangeInfo(object):
"""An exchange where assets are traded.
Parameters
----------
name : str or None
The full name of the exchange, for example 'NEW YORK STOCK EXCHANGE' or
'NASDAQ GLOBAL MARKET'.
canonical_name : str
The canonical name of the exchange, for example 'NYSE' or 'NASDAQ'. If
None this will be the same as the name.
country_code : str
The country code where the exchange is located.
Attributes
----------
name : str or None
The full name of the exchange, for example 'NEW YORK STOCK EXCHANGE' or
'NASDAQ GLOBAL MARKET'.
canonical_name : str
The canonical name of the exchange, for example 'NYSE' or 'NASDAQ'. If
None this will be the same as the name.
country_code : str
The country code where the exchange is located.
calendar : TradingCalendar
The trading calendar the exchange uses.
"""
def __init__(self, name, canonical_name, country_code):
self.name = name
if canonical_name is None:
canonical_name = name
self.canonical_name = canonical_name
self.country_code = country_code.upper()
def __repr__(self):
return '%s(%r, %r, %r)' % (
type(self).__name__,
self.name,
self.canonical_name,
self.country_code,
)
@property
def calendar(self):
"""The trading calendar that this exchange uses.
"""
return get_calendar(self.canonical_name)
def __eq__(self, other):
if not isinstance(other, ExchangeInfo):
return NotImplemented
return all(
getattr(self, attr) == getattr(other, attr)
for attr in ('name', 'canonical_name', 'country_code')
)
def __ne__(self, other):
eq = self == other
if eq is NotImplemented:
return NotImplemented
return not eq
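# Minimal usage sketch (illustrative values; `calendar` additionally requires
# that a trading calendar is registered under the canonical name):
#
# >>> nyse = ExchangeInfo('NEW YORK STOCK EXCHANGE', 'NYSE', 'us')
# >>> nyse.country_code  # upper-cased on construction
# 'US'
# >>> nyse == ExchangeInfo('NEW YORK STOCK EXCHANGE', 'NYSE', 'US')
# True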
| 845 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.bugtracking.vcs;
import java.awt.Image;
import java.util.Arrays;
import java.util.Collection;
import org.netbeans.modules.bugtracking.TestIssue;
import org.netbeans.modules.bugtracking.TestRepository;
import org.netbeans.modules.bugtracking.spi.*;
/**
*
* @author <NAME>
*/
public class HookRepository extends TestRepository {
private final RepositoryInfo info = new RepositoryInfo("HookRepository", "HookRepository", "http://url", "HookRepository", "HookRepository", null, null, null, null);
@Override
public RepositoryInfo getInfo() {
return info;
}
@Override
public Image getIcon() {
return null;
}
@Override
public Collection<TestIssue> getIssues(String[] id) {
return Arrays.asList(new TestIssue[] {HookIssue.instance});
}
@Override
public Collection<TestIssue> simpleSearch(String criteria) {
return Arrays.asList(new TestIssue[] {HookIssue.instance});
}
}
| 541 |
818 | /*
* Copyright 2021 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.kogito.codegen.rules;
import org.drools.modelcompiler.builder.QueryModel;
import org.kie.internal.ruleunit.RuleUnitDescription;
import org.kie.kogito.codegen.api.GeneratedFile;
import org.kie.kogito.codegen.api.GeneratedFileType;
import org.kie.kogito.codegen.api.context.KogitoBuildContext;
import org.kie.kogito.codegen.api.template.InvalidTemplateException;
import com.github.javaparser.ast.CompilationUnit;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.ConstructorDeclaration;
import com.github.javaparser.ast.expr.StringLiteralExpr;
import com.github.javaparser.ast.type.ClassOrInterfaceType;
public class QueryEventDrivenExecutorGenerator extends AbstractQueryEntrypointGenerator {
private final String dataType;
private final String returnType;
public QueryEventDrivenExecutorGenerator(RuleUnitDescription ruleUnit, QueryModel query, KogitoBuildContext context) {
super(ruleUnit, query, context, "EventDrivenExecutor", "EventDrivenExecutor");
this.dataType = ruleUnit.getCanonicalName() + (context.hasDI() ? "" : "DTO");
this.returnType = String.format("java.util.List<%s>", query.getBindings().size() != 1
? queryClassName + ".Result"
: query.getBindings().values().iterator().next().getCanonicalName());
}
@Override
public GeneratedFile generate() {
CompilationUnit cu = generator.compilationUnitOrThrow("Could not create CompilationUnit");
ClassOrInterfaceDeclaration classDecl = cu.findFirst(ClassOrInterfaceDeclaration.class)
.orElseThrow(() -> new InvalidTemplateException(generator, "Cannot find class declaration"));
classDecl.setName(targetClassName);
classDecl.findAll(ClassOrInterfaceType.class).forEach(this::interpolateClassOrInterfaceType);
classDecl.findAll(ConstructorDeclaration.class).forEach(this::interpolateConstructorDeclaration);
classDecl.findAll(StringLiteralExpr.class).forEach(this::interpolateStringLiteral);
return new GeneratedFile(GeneratedFileType.SOURCE, generatedFilePath(), cu.toString());
}
private void interpolateClassOrInterfaceType(ClassOrInterfaceType input) {
input.setName(interpolatedTypeNameFrom(input.getNameAsString()));
}
private void interpolateConstructorDeclaration(ConstructorDeclaration input) {
input.setName(interpolatedTypeNameFrom(input.getNameAsString()));
}
private void interpolateStringLiteral(StringLiteralExpr input) {
input.setString(input.getValue().replace("$name$", queryName));
}
private String interpolatedTypeNameFrom(String input) {
return input.replace("$QueryType$", queryClassName)
.replace("$DataType$", dataType)
.replace("$ReturnType$", returnType);
}
}
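// Interpolation sketch (names are hypothetical; the real template ships with
// Kogito codegen): given queryClassName = "FindApprovedQuery", a template type
// like "$QueryType$EventDrivenExecutor" becomes
// "FindApprovedQueryEventDrivenExecutor"; "$DataType$" becomes the rule unit's
// canonical name (with a "DTO" suffix when DI is absent); "$ReturnType$"
// becomes the java.util.List<...> type computed in the constructor; and
// "$name$" inside string literals is replaced with the query name.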
| 1,163 |
1,002 | // Copyright (c) Microsoft Corporation. All rights reserved.
//
// Licensed under the MIT License. See LICENSE.txt in the project root for license information.
#pragma once
//
// This mock object is given to a GraphicsDevice so that we can check that it
// calls the methods we expect to call on it.
//
class MockDxgiDevice : public RuntimeClass<
RuntimeClassFlags<ClassicCom>,
ChainInterfaces<IDXGIDevice3, IDXGIDevice2, IDXGIDevice1, IDXGIDevice>>
{
public:
std::function<void()> MockTrim;
std::function<HRESULT(IID const&, void **)> MockGetParent;
std::function<HRESULT(IDXGIAdapter**)> MockGetAdapter;
STDMETHODIMP SetPrivateData(GUID const&,UINT,const void *)
{
Assert::Fail(L"Unexpected call to SetPrivateData");
return E_NOTIMPL;
}
STDMETHODIMP SetPrivateDataInterface(GUID const&,const IUnknown *)
{
Assert::Fail(L"Unexpected call to SetPrivateDataInterface");
return E_NOTIMPL;
}
STDMETHODIMP GetPrivateData(GUID const&,UINT *,void *)
{
Assert::Fail(L"Unexpected call to GetPrivateData");
return E_NOTIMPL;
}
STDMETHODIMP GetParent(IID const& iid, void** out)
{
if (MockGetParent)
{
return MockGetParent(iid, out);
}
Assert::Fail(L"Unexpected call to GetParent");
return E_NOTIMPL;
}
STDMETHODIMP GetAdapter(IDXGIAdapter** adapter)
{
if (MockGetAdapter)
{
return MockGetAdapter(adapter);
}
Assert::Fail(L"Unexpected call to GetAdapter");
return E_NOTIMPL;
}
STDMETHODIMP CreateSurface(const DXGI_SURFACE_DESC *,UINT,DXGI_USAGE,const DXGI_SHARED_RESOURCE *,IDXGISurface **)
{
Assert::Fail(L"Unexpected call to CreateSurface");
return E_NOTIMPL;
}
STDMETHODIMP QueryResourceResidency(IUnknown *const *,DXGI_RESIDENCY *,UINT)
{
Assert::Fail(L"Unexpected call to QueryResourceResidency");
return E_NOTIMPL;
}
STDMETHODIMP SetGPUThreadPriority(INT)
{
Assert::Fail(L"Unexpected call to SetGPUThreadPriority");
return E_NOTIMPL;
}
STDMETHODIMP GetGPUThreadPriority(INT *)
{
Assert::Fail(L"Unexpected call to GetGPUThreadPriority");
return E_NOTIMPL;
}
STDMETHODIMP SetMaximumFrameLatency(UINT)
{
Assert::Fail(L"Unexpected call to SetMaximumFrameLatency");
return E_NOTIMPL;
}
STDMETHODIMP GetMaximumFrameLatency(UINT *)
{
Assert::Fail(L"Unexpected call to GetMaximumFrameLatency");
return E_NOTIMPL;
}
STDMETHODIMP OfferResources(UINT,IDXGIResource *const *,DXGI_OFFER_RESOURCE_PRIORITY)
{
Assert::Fail(L"Unexpected call to OfferResources");
return E_NOTIMPL;
}
STDMETHODIMP ReclaimResources(UINT,IDXGIResource *const *,BOOL *)
{
Assert::Fail(L"Unexpected call to ReclaimResources");
return E_NOTIMPL;
}
STDMETHODIMP EnqueueSetEvent(HANDLE)
{
Assert::Fail(L"Unexpected call to EnqueueSetEvent");
return E_NOTIMPL;
}
STDMETHODIMP_(void) Trim()
{
if (MockTrim)
{
MockTrim();
}
else
{
Assert::Fail(L"Unexpected call to Trim");
}
}
};
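// Sketch of typical test usage (assumes WRL's Microsoft::WRL::Make; handler
// bodies are illustrative):
//
//   auto dxgiDevice = Make<MockDxgiDevice>();
//   dxgiDevice->MockTrim = [&] { trimCalled = true; };
//   dxgiDevice->MockGetAdapter = [&](IDXGIAdapter** adapter) { return E_FAIL; };
//
// Any method invoked without an installed Mock* handler fails the test through
// Assert::Fail, which is how unexpected calls are caught.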
| 1,478 |
6,059 | <reponame>penguin-wwy/redex
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.facebook.redex.examples.synth;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.TextView;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
TextView textView = (TextView) findViewById(R.id.message);
textView.setText("Redex Synth Example\n");
Alpha a = new Alpha(12);
Alpha.Beta b = a.new Beta();
textView.append("Double Alpha(12) = " + b.doubleAlpha() + "\n");
}
}
| 304 |
474 | <filename>GVRf/Extensions/gvrf-physics/src/main/jni/engine/bullet/bullet_sliderconstraint.h
/* Copyright 2015 Samsung Electronics Co., LTD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// Created by c.bozzetto on 31/05/2017.
//
#ifndef EXTENSIONS_BULLET_SLIDERCONSTRAINT_H
#define EXTENSIONS_BULLET_SLIDERCONSTRAINT_H
#include "../physics_sliderconstraint.h"
#include "bullet_object.h"
class btSliderConstraint;
namespace gvr {
class PhysicsRigidBody;
class BulletRigidBody;
class BulletSliderConstraint : public PhysicsSliderConstraint,
BulletObject {
public:
explicit BulletSliderConstraint(PhysicsRigidBody *rigidBodyB);
BulletSliderConstraint(btSliderConstraint *constraint);
virtual ~BulletSliderConstraint();
void setAngularLowerLimit(float limit);
float getAngularLowerLimit() const;
void setAngularUpperLimit(float limit);
float getAngularUpperLimit() const;
void setLinearLowerLimit(float limit);
float getLinearLowerLimit() const;
void setLinearUpperLimit(float limit);
float getLinearUpperLimit() const;
void setBreakingImpulse(float impulse);
float getBreakingImpulse() const;
void *getUnderlying() { return mSliderConstraint; }
void updateConstructionInfo();
private:
btSliderConstraint *mSliderConstraint;
BulletRigidBody *mRigidBodyB;
float mBreakingImpulse;
float mLowerAngularLimit;
float mUpperAngularLimit;
float mLowerLinearLimit;
float mUpperLinearLimit;
};
}
#endif //EXTENSIONS_BULLET_SLIDERCONSTRAINT_H
| 836 |
14,793 | package me.chanjar.weixin.common.util.http.jodd;
import jodd.http.HttpConnectionProvider;
import jodd.http.HttpRequest;
import jodd.http.HttpResponse;
import jodd.http.ProxyInfo;
import lombok.extern.slf4j.Slf4j;
import me.chanjar.weixin.common.bean.result.WxMinishopImageUploadResult;
import me.chanjar.weixin.common.enums.WxType;
import me.chanjar.weixin.common.error.WxError;
import me.chanjar.weixin.common.error.WxErrorException;
import me.chanjar.weixin.common.util.http.MinishopUploadRequestExecutor;
import me.chanjar.weixin.common.util.http.RequestHttp;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
/**
 * Jodd-http implementation of the WeChat minishop image upload request executor.
*
* @author ecoolper
* @date 2017/5/5
*/
@Slf4j
public class JoddHttpMinishopMediaUploadRequestExecutor extends MinishopUploadRequestExecutor<HttpConnectionProvider, ProxyInfo> {
public JoddHttpMinishopMediaUploadRequestExecutor(RequestHttp requestHttp) {
super(requestHttp);
}
@Override
public WxMinishopImageUploadResult execute(String uri, File file, WxType wxType) throws WxErrorException, IOException {
HttpRequest request = HttpRequest.post(uri);
if (requestHttp.getRequestHttpProxy() != null) {
requestHttp.getRequestHttpClient().useProxy(requestHttp.getRequestHttpProxy());
}
request.withConnectionProvider(requestHttp.getRequestHttpClient());
request.form("media", file);
HttpResponse response = request.send();
response.charset(StandardCharsets.UTF_8.name());
String responseContent = response.bodyText();
WxError error = WxError.fromJson(responseContent, wxType);
if (error.getErrorCode() != 0) {
throw new WxErrorException(error);
}
log.info("responseContent: " + responseContent);
return WxMinishopImageUploadResult.fromJson(responseContent);
}
}
| 664 |
2,816 | <filename>third_party/sqlsmith/schema.cc<gh_stars>1000+
#include "schema.hh"
#include "relmodel.hh"
#include <typeinfo>
using namespace std;
void schema::generate_indexes() {
cerr << "Generating indexes...";
for (auto &type : types) {
assert(type);
for (auto &r : aggregates) {
if (type->consistent(r.restype))
aggregates_returning_type.insert(pair<sqltype *, routine *>(type, &r));
}
for (auto &r : routines) {
if (!type->consistent(r.restype))
continue;
routines_returning_type.insert(pair<sqltype *, routine *>(type, &r));
if (!r.argtypes.size())
parameterless_routines_returning_type.insert(pair<sqltype *, routine *>(type, &r));
}
for (auto &t : tables) {
for (auto &c : t.columns()) {
if (type->consistent(c.type)) {
tables_with_columns_of_type.insert(pair<sqltype *, table *>(type, &t));
break;
}
}
}
for (auto &concrete : types) {
if (type->consistent(concrete))
concrete_type.insert(pair<sqltype *, sqltype *>(type, concrete));
}
for (auto &o : operators) {
if (type->consistent(o.result))
operators_returning_type.insert(pair<sqltype *, op *>(type, &o));
}
}
for (auto &t : tables) {
if (t.is_base_table)
base_tables.push_back(&t);
}
cerr << "done." << endl;
assert(booltype);
assert(inttype);
assert(internaltype);
assert(arraytype);
}
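// Note: the multimaps populated above are what the query generator samples
// from later; for example, equal_range(type) on routines_returning_type yields
// every routine whose result is consistent with 'type'. (Illustrative note;
// the consumers live elsewhere in sqlsmith.)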
| 578 |
1,159 | {
"name": "vizzu-unit.test",
"scripts": {
"test": "node --experimental-vm-modules ./node_modules/jest/bin/jest.js --config=jest.config.js"
},
"type": "module",
"dependencies": {
"jest": "*"
}
}
| 116 |
1,688 | <reponame>bertilnilsson/TypeScript-Sublime-Plugin<gh_stars>1000+
from ..libs import *
from .event_hub import EventHub
class RenameEventListener:
def on_load(self, view):
client_info = cli.get_or_add_file(view.file_name())
# finish the renaming
if client_info and client_info.rename_on_load:
view.run_command(
'typescript_delayed_rename_file',
{"locs_name": client_info.rename_on_load}
)
client_info.rename_on_load = None
listener = RenameEventListener()
EventHub.subscribe("on_load", listener.on_load) | 271 |
1,338 | /*
* Copyright 2004-2008, Haiku.
* Distributed under the terms of the MIT License.
*
* Authors:
* <NAME>
*/
#ifndef TEAM_LIST_ITEM_H
#define TEAM_LIST_ITEM_H
#include <Bitmap.h>
#include <ListItem.h>
#include <Path.h>
#include <Roster.h>
#include <String.h>
extern bool gLocalizedNamePreferred;
class TeamListItem : public BListItem {
public:
TeamListItem(team_info& info);
virtual ~TeamListItem();
virtual void DrawItem(BView* owner, BRect frame,
bool complete = false);
virtual void Update(BView* owner, const BFont* font);
void CacheLocalizedName();
const team_info* GetInfo();
const BBitmap* LargeIcon() { return &fLargeIcon; };
const BPath* Path() { return &fPath; };
const char* AppSignature() { return fAppInfo.signature; };
bool IsSystemServer();
bool IsApplication() const;
bool Found() const { return fFound; }
void SetFound(bool found) { fFound = found; }
void SetRefusingToQuit(bool refusing);
bool IsRefusingToQuit();
static int32 MinimalHeight();
private:
team_info fTeamInfo;
app_info fAppInfo;
BBitmap fMiniIcon;
BBitmap fLargeIcon;
BPath fPath;
BString fLocalizedName;
bool fFound;
bool fRefusingToQuit;
};
#endif // TEAM_LIST_ITEM_H
| 570 |
1,545 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.bookkeeper.client;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Iterator;
import org.apache.bookkeeper.client.BookKeeper.DigestType;
import org.apache.bookkeeper.conf.ClientConfiguration;
import org.apache.bookkeeper.test.BookKeeperClusterTestCase;
import org.junit.Test;
/**
* Test ListLedgers.
*/
public class ListLedgersTest extends BookKeeperClusterTestCase {
private final DigestType digestType;
public ListLedgersTest () {
super(4);
this.digestType = DigestType.CRC32;
}
@Test
public void testListLedgers()
throws Exception {
int numOfLedgers = 10;
ClientConfiguration conf = new ClientConfiguration();
conf.setMetadataServiceUri(zkUtil.getMetadataServiceUri());
BookKeeper bkc = new BookKeeper(conf);
for (int i = 0; i < numOfLedgers; i++) {
bkc.createLedger(digestType, "testPasswd".
getBytes()).close();
}
        BookKeeperAdmin admin = new BookKeeperAdmin(zkUtil.getZooKeeperConnectString());
Iterable<Long> iterable = admin.listLedgers();
int counter = 0;
for (Long lId: iterable) {
counter++;
}
assertTrue("Wrong number of ledgers: " + numOfLedgers,
counter == numOfLedgers);
}
@Test
public void testEmptyList()
throws Exception {
ClientConfiguration conf = new ClientConfiguration();
conf.setMetadataServiceUri(zkUtil.getMetadataServiceUri());
        BookKeeperAdmin admin = new BookKeeperAdmin(zkUtil.getZooKeeperConnectString());
Iterable<Long> iterable = admin.listLedgers();
assertFalse("There should be no ledger", iterable.iterator().hasNext());
}
@Test
public void testRemoveNotSupported()
throws Exception {
int numOfLedgers = 1;
ClientConfiguration conf = new ClientConfiguration();
conf.setMetadataServiceUri(zkUtil.getMetadataServiceUri());
BookKeeper bkc = new BookKeeper(conf);
for (int i = 0; i < numOfLedgers; i++) {
bkc.createLedger(digestType, "testPasswd".
getBytes()).close();
}
        BookKeeperAdmin admin = new BookKeeperAdmin(zkUtil.getZooKeeperConnectString());
Iterator<Long> iterator = admin.listLedgers().iterator();
iterator.next();
try {
iterator.remove();
} catch (UnsupportedOperationException e) {
// This exception is expected
return;
}
fail("Remove is not supported, we shouln't have reached this point");
}
}
| 1,338 |
5,169 | <filename>Specs/DataPersistence/0.1.0/DataPersistence.podspec.json
{
"name": "DataPersistence",
"version": "0.1.0",
"summary": "DataPersistence is a drop in class that allows easy persistance of data using NSKeyedArchiver/NSKeyedUnarchiver.",
"homepage": "https://github.com/tomdiggle/datapersistence",
"license": "MIT",
"authors": {
"<NAME>": "<EMAIL>"
},
"social_media_url": "https://twitter.com/tomdiggle",
"source": {
"git": "https://github.com/tomdiggle/datapersistence.git",
"tag": "0.1.0"
},
"requires_arc": true,
"platforms": {
"ios": "9.0"
},
"source_files": "DataPersistence"
}
| 246 |
375 | <gh_stars>100-1000
{
"id": 56,
"logo_path": "/mpGWkDeikYLeAR8LVNaixig0vwE.png",
"description": null,
"name": "<NAME>",
"parent_company": null,
"homepage": null,
"headquarters": null
} | 88 |
743 | <gh_stars>100-1000
{"actions":[{"title":"OK","type":"Action.Submit"}],"body":[{"choices":[{"title":"Red","value":"1"},{"title":"Green","value":"2"},{"title":"Blue","value":"3"}],"id":"myColor4","isMultiSelect":true,"style":"Expanded","type":"Input.ChoiceSet","value":"1","wrap":true}],"type":"AdaptiveCard","version":"1.0"} | 96 |
2,151 | <filename>third_party/android_tools/sdk/sources/android-25/android/text/method/DateTimeKeyListener.java
/*
* Copyright (C) 2006 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.text.method;
import android.text.InputType;
import android.view.KeyEvent;
/**
* For entering dates and times in the same text field.
* <p></p>
* As for all implementations of {@link KeyListener}, this class is only concerned
* with hardware keyboards. Software input methods have no obligation to trigger
* the methods in this class.
*/
public class DateTimeKeyListener extends NumberKeyListener
{
public int getInputType() {
return InputType.TYPE_CLASS_DATETIME
| InputType.TYPE_DATETIME_VARIATION_NORMAL;
}
@Override
protected char[] getAcceptedChars()
{
return CHARACTERS;
}
public static DateTimeKeyListener getInstance() {
if (sInstance != null)
return sInstance;
sInstance = new DateTimeKeyListener();
return sInstance;
}
/**
* The characters that are used.
*
* @see KeyEvent#getMatch
* @see #getAcceptedChars
*/
public static final char[] CHARACTERS = new char[] {
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'm',
'p', ':', '/', '-', ' '
};
private static DateTimeKeyListener sInstance;
}
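// Hypothetical usage sketch (an EditText is the usual host for a KeyListener):
//
//   editText.setKeyListener(DateTimeKeyListener.getInstance());
//
// With this installed, hardware-keyboard input is filtered to CHARACTERS above,
// e.g. "12/31 11:59 pm"; software keyboards may ignore the restriction, as the
// class javadoc notes.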
| 666 |
380 | <gh_stars>100-1000
# -*- coding: utf-8 -*-
'''Chemical Engineering Design Library (ChEDL). Utilities for process modeling.
Copyright (C) 2019, 2020, 2021 <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
__all__ = [
'Phase',
'derivatives_thermodynamic',
'derivatives_thermodynamic_mass',
'derivatives_jacobian',
]
from fluids.constants import R, R_inv
from math import sqrt
from thermo.serialize import arrays_to_lists
from fluids.numerics import (horner, horner_log, jacobian,
poly_fit_integral_value, poly_fit_integral_over_T_value,
newton_system, trunc_exp, is_micropython)
from chemicals.utils import (log, Cp_minus_Cv, phase_identification_parameter,
Joule_Thomson, speed_of_sound, dxs_to_dns, dns_to_dn_partials,
hash_any_primitive)
from thermo.utils import POLY_FIT
from thermo import phases
from .phase_utils import object_lookups
class Phase(object):
'''`Phase` is the base class for all phase objects in `thermo`. Each
sub-class implements a number of core properties; many other properties
can be calculated from them.
Among those properties are `H`, `S`, `Cp`, `dP_dT`, `dP_dV`,
`d2P_dT2`, `d2P_dV2`, and `d2P_dTdV`.
An additional set of properties that can be implemented and that enable
more functionality are `dH_dP`, `dS_dT`, `dS_dP`, `d2H_dT2`, `d2H_dP2`,
`d2S_dP2`, `dH_dT_V`, `dH_dP_V`, `dH_dV_T`, `dH_dV_P`, `dS_dT_V`,
`dS_dP_V`, `d2H_dTdP`, `d2H_dT2_V`, `d2P_dTdP`, `d2P_dVdP`, `d2P_dVdT_TP`,
`d2P_dT2_PV`.
Some models may re-implement properties which would normally be
calculated by this `Phase` base class because they have more explicit,
faster ways of calculating the property.
When a phase object is the result of a Flash calculation, the resulting
phase objects have a reference to a
:obj:`ChemicalConstantsPackage <thermo.chemical_package.ChemicalConstantsPackage>`
object and all of its properties can be accessed from from the resulting
phase objects as well.
A :obj:`ChemicalConstantsPackage <thermo.chemical_package.ChemicalConstantsPackage>`
object can also be manually set to the attribute `constants` to enable
access to those properties. This includes mass-based properties, which are
not accessible from Phase objects without a reference to the constants.
'''
INCOMPRESSIBLE_CONST = 1e30
R = R
R2 = R*R
R_inv = R_inv
is_solid = False
    ideal_gas_basis = False # whether the model uses the ideal-gas heat capacity as its basis
T_REF_IG = 298.15
T_REF_IG_INV = 1.0/T_REF_IG
'''The numerical inverse of :obj:`T_REF_IG`, stored to save a division.
'''
P_REF_IG = 101325.
P_REF_IG_INV = 1.0/P_REF_IG
LOG_P_REF_IG = log(P_REF_IG)
T_MAX_FIXED = 10000.0
T_MIN_FIXED = 1e-3
P_MAX_FIXED = 1e9
    P_MIN_FIXED = 1e-2 # 1e-3 was low enough that root-finding issues occurred; they could not be fixed
V_MIN_FIXED = 1e-9 # m^3/mol
    V_MAX_FIXED = 1e9 # m^3/mol
T_MIN_FLASH = 1e-300
force_phase = None
'''Attribute which can be set to a global Phase object to force the phases
identification routines to label it a certain phase. Accepts values of ('g', 'l', 's').'''
_Psats_data = None
_Cpgs_data = None
Psats_poly_fit = False
Cpgs_poly_fit = False
composition_independent = False
scalar = True
pure_references = ()
'''Tuple of attribute names which hold lists of :obj:`thermo.utils.TDependentProperty`
or :obj:`thermo.utils.TPDependentProperty` instances.'''
pure_reference_types = ()
'''Tuple of types of :obj:`thermo.utils.TDependentProperty`
or :obj:`thermo.utils.TPDependentProperty` corresponding to `pure_references`.'''
obj_references = ()
'''Tuple of object instances which should be stored as json using their own
as_json method.
'''
pointer_references = ()
'''Tuple of attributes which should be stored by converting them to
a string, and then they will be looked up in their corresponding
`pointer_reference_dicts` entry.
'''
pointer_reference_dicts = ()
'''Tuple of dictionaries for string -> object
'''
reference_pointer_dicts = ()
'''Tuple of dictionaries for object -> string
'''
if not is_micropython:
def __init_subclass__(cls):
cls.__full_path__ = "%s.%s" %(cls.__module__, cls.__qualname__)
else:
__full_path__ = None
def __str__(self):
s = '<%s, ' %(self.__class__.__name__)
try:
s += 'T=%g K, P=%g Pa' %(self.T, self.P)
except:
pass
s += '>'
return s
def as_json(self):
r'''Method to create a JSON-friendly serialization of the phase
which can be stored, and reloaded later.
Returns
-------
json_repr : dict
JSON-friendly representation, [-]
Notes
-----
Examples
--------
>>> import json
>>> from thermo import IAPWS95Liquid
>>> phase = IAPWS95Liquid(T=300, P=1e5, zs=[1])
>>> new_phase = Phase.from_json(json.loads(json.dumps(phase.as_json())))
>>> assert phase == new_phase
'''
d = self.__dict__.copy()
if not self.scalar:
d = arrays_to_lists(d)
for obj_name in self.obj_references:
o = d[obj_name]
if type(o) is list:
d[obj_name] = [v.as_json() for v in o]
else:
d[obj_name] = o.as_json()
for prop_name in self.pure_references:
# Known issue: references to other properties
# Needs special fixing - maybe a function
l = d[prop_name]
if l:
d[prop_name] = [v.as_json() for v in l]
for ref_name, ref_lookup in zip(self.pointer_references, self.reference_pointer_dicts):
d[ref_name] = ref_lookup[d[ref_name]]
d["py/object"] = self.__full_path__
d['json_version'] = 1
return d
@classmethod
def from_json(cls, json_repr):
r'''Method to create a phase from a JSON
serialization of another phase.
Parameters
----------
json_repr : dict
JSON-friendly representation, [-]
Returns
-------
phase : :obj:`Phase`
Newly created phase object from the json serialization, [-]
Notes
-----
It is important that the input string be in the same format as that
created by :obj:`Phase.as_json`.
Examples
--------
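        >>> import json
        >>> from thermo import IAPWS95Liquid
        >>> phase = IAPWS95Liquid(T=300, P=1e5, zs=[1])
        >>> new_phase = Phase.from_json(json.loads(json.dumps(phase.as_json())))
        >>> assert phase == new_phase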
'''
d = json_repr
phase_name = d['py/object']
del d['py/object']
del d['json_version']
phase = phases.phase_full_path_dict[phase_name]
new = phase.__new__(phase)
for obj_name, obj_cls in zip(new.pure_references, new.pure_reference_types):
l = d[obj_name]
if l:
for i, v in enumerate(l):
l[i] = obj_cls.from_json(v)
for obj_name in new.obj_references:
o = d[obj_name]
if type(o) is list:
d[obj_name] = [object_lookups[v['py/object']].from_json(v) for v in o]
else:
obj_cls = object_lookups[o['py/object']]
d[obj_name] = obj_cls.from_json(o)
for ref_name, ref_lookup in zip(new.pointer_references, new.pointer_reference_dicts):
d[ref_name] = ref_lookup[d[ref_name]]
new.__dict__ = d
return new
def __hash__(self):
r'''Method to calculate and return a hash representing the exact state
of the object.
Returns
-------
hash : int
Hash of the object, [-]
'''
        # Ensure the cached hashes are computed so they become part of __dict__ and thus the object hash
self.model_hash(False)
self.model_hash(True)
self.state_hash()
d = self.__dict__
ans = hash_any_primitive((self.__class__.__name__, d))
return ans
def __eq__(self, other):
return self.__hash__() == hash(other)
def state_hash(self):
r'''Basic method to calculate a hash of the state of the phase and its
model parameters.
Note that the hashes should only be compared on the same system running
in the same process!
Returns
-------
state_hash : int
Hash of the object's model parameters and state, [-]
'''
return hash_any_primitive((self.model_hash(), self.T, self.P, self.V(), self.zs))
def model_hash(self, ignore_phase=False):
r'''Method to compute a hash of a phase.
Parameters
----------
ignore_phase : bool
            Whether or not to include the specific class of the model in the
hash
Returns
-------
hash : int
Hash representing the settings of the phase; phases with all
identical model parameters should have the same hash.
'''
if ignore_phase:
try:
return self._model_hash_ignore_phase
except AttributeError:
pass
else:
try:
return self._model_hash
except AttributeError:
pass
# Note: not all attributes are in __dict__, must use getattr
to_hash = [getattr(self, v) for v in self.model_attributes]
self._model_hash_ignore_phase = h = hash_any_primitive(to_hash)
self._model_hash = hash((self.__class__.__name__, h))
if ignore_phase:
return self._model_hash_ignore_phase
else:
return self._model_hash
def value(self, name):
r'''Method to retrieve a property from a string. This more or less
wraps `getattr`.
`name` could be a python property like 'Tms' or a callable method
like 'H'.
Parameters
----------
name : str
String representing the property, [-]
Returns
-------
value : various
Value specified, [various]
Notes
-----
'''
if name in ('beta_mass',):
return self.result.value(name, self)
v = getattr(self, name)
try:
v = v()
except:
pass
return v
### Methods that should be implemented by subclasses
def to_TP_zs(self, T, P, zs):
r'''Method to create a new Phase object with the same constants as the
existing Phase but at a different `T` and `P`.
Parameters
----------
zs : list[float]
Molar composition of the new phase, [-]
T : float
Temperature of the new phase, [K]
P : float
Pressure of the new phase, [Pa]
Returns
-------
new_phase : Phase
New phase at the specified conditions, [-]
Notes
-----
This method is marginally faster than :obj:`Phase.to` as it does not
need to check what the inputs are.
Examples
--------
>>> from thermo import IdealGas
>>> phase = IdealGas(T=300, P=1e5, zs=[.79, .21], HeatCapacityGases=[])
>>> phase.to_TP_zs(T=1e5, P=1e3, zs=[.5, .5])
IdealGas(HeatCapacityGases=[], T=100000.0, P=1000.0, zs=[0.5, 0.5])
'''
raise NotImplementedError("Must be implemented by subphases")
def to(self, zs, T=None, P=None, V=None):
r'''Method to create a new Phase object with the same constants as the
existing Phase but at different conditions. Mole fractions `zs` are
always required and any two of `T`, `P`, and `V` are required.
Parameters
----------
zs : list[float]
Molar composition of the new phase, [-]
T : float, optional
Temperature of the new phase, [K]
P : float, optional
Pressure of the new phase, [Pa]
V : float, optional
Molar volume of the new phase, [m^3/mol]
Returns
-------
new_phase : Phase
New phase at the specified conditions, [-]
Notes
-----
Examples
--------
These sample cases illustrate the three combinations of inputs.
Note that some thermodynamic models may have multiple solutions for
some inputs!
>>> from thermo import IdealGas
>>> phase = IdealGas(T=300, P=1e5, zs=[.79, .21], HeatCapacityGases=[])
>>> phase.to(T=1e5, P=1e3, zs=[.5, .5])
IdealGas(HeatCapacityGases=[], T=100000.0, P=1000.0, zs=[0.5, 0.5])
>>> phase.to(V=1e-4, P=1e3, zs=[.1, .9])
IdealGas(HeatCapacityGases=[], T=0.012027235504, P=1000.0, zs=[0.1, 0.9])
>>> phase.to(T=1e5, V=1e12, zs=[.2, .8])
IdealGas(HeatCapacityGases=[], T=100000.0, P=8.31446261e-07, zs=[0.2, 0.8])
'''
raise NotImplementedError("Must be implemented by subphases")
def V(self):
r'''Method to return the molar volume of the phase.
Returns
-------
V : float
Molar volume, [m^3/mol]
'''
raise NotImplementedError("Must be implemented by subphases")
def dP_dT(self):
r'''Method to calculate and return the first temperature derivative of
pressure of the phase.
Returns
-------
dP_dT : float
First temperature derivative of pressure, [Pa/K]
'''
raise NotImplementedError("Must be implemented by subphases")
def dP_dV(self):
r'''Method to calculate and return the first volume derivative of
pressure of the phase.
Returns
-------
dP_dV : float
First volume derivative of pressure, [Pa*mol/m^3]
'''
raise NotImplementedError("Must be implemented by subphases")
def d2P_dT2(self):
r'''Method to calculate and return the second temperature derivative of
pressure of the phase.
Returns
-------
d2P_dT2 : float
Second temperature derivative of pressure, [Pa/K^2]
'''
raise NotImplementedError("Must be implemented by subphases")
def d2P_dV2(self):
r'''Method to calculate and return the second volume derivative of
pressure of the phase.
Returns
-------
d2P_dV2 : float
Second volume derivative of pressure, [Pa*mol^2/m^6]
'''
raise NotImplementedError("Must be implemented by subphases")
def d2P_dTdV(self):
r'''Method to calculate and return the second derivative of
pressure with respect to temperature and volume of the phase.
Returns
-------
d2P_dTdV : float
            Second derivative of pressure with respect to temperature and
            volume, [mol*Pa^2/(J*K)]
'''
raise NotImplementedError("Must be implemented by subphases")
def lnphis(self):
r'''Method to calculate and return the log of fugacity coefficients of
each component in the phase.
Returns
-------
lnphis : list[float]
Log fugacity coefficients, [-]
'''
raise NotImplementedError("Must be implemented by subphases")
def dlnphis_dT(self):
r'''Method to calculate and return the temperature derivative of the
log of fugacity coefficients of each component in the phase.
Returns
-------
dlnphis_dT : list[float]
First temperature derivative of log fugacity coefficients, [1/K]
'''
raise NotImplementedError("Must be implemented by subphases")
def dlnphis_dP(self):
r'''Method to calculate and return the pressure derivative of the
log of fugacity coefficients of each component in the phase.
Returns
-------
dlnphis_dP : list[float]
First pressure derivative of log fugacity coefficients, [1/Pa]
'''
raise NotImplementedError("Must be implemented by subphases")
def H(self):
r'''Method to calculate and return the enthalpy of the phase.
The reference state for most subclasses is an ideal-gas enthalpy of
zero at 298.15 K and 101325 Pa.
Returns
-------
H : float
Molar enthalpy, [J/(mol)]
'''
raise NotImplementedError("Must be implemented by subphases")
def S(self):
r'''Method to calculate and return the entropy of the phase.
The reference state for most subclasses is an ideal-gas entropy of
zero at 298.15 K and 101325 Pa.
Returns
-------
S : float
Molar entropy, [J/(mol*K)]
'''
raise NotImplementedError("Must be implemented by subphases")
def Cp(self):
r'''Method to calculate and return the constant-pressure heat capacity
of the phase.
Returns
-------
Cp : float
Molar heat capacity, [J/(mol*K)]
'''
raise NotImplementedError("Must be implemented by subphases")
### Benchmarking methods
def _compute_main_properties(self):
'''Method which computes some basic properties. For benchmarking;
accepts no arguments and returns nothing. A timer should be used
outside of this method.
'''
self.H()
self.S()
self.Cp()
self.Cv()
self.dP_dT()
self.dP_dV()
self.d2P_dT2()
self.d2P_dV2()
self.d2P_dTdV()
self.PIP()
### Consistency Checks
def S_phi_consistency(self):
r'''Method to calculate and return a consistency check between ideal
gas entropy behavior, and the fugacity coefficients and their
temperature derivatives.
.. math::
S = S^{ig} - \sum_{i} z_i R\left(\ln \phi_i + T \frac{\partial \ln
\phi_i}{\partial T}\right)
Returns
-------
error : float
Relative consistency error
:math:`|1 - S^{\text{from phi}}/S^\text{implemented}|`, [-]
'''
# From coco
S0 = self.S_ideal_gas()
lnphis = self.lnphis()
dlnphis_dT = self.dlnphis_dT()
T, zs = self.T, self.zs
for i in range(self.N):
S0 -= zs[i]*(R*lnphis[i] + R*T*dlnphis_dT[i])
return abs(1.0 - S0/self.S())
def H_phi_consistency(self):
r'''Method to calculate and return a consistency check between ideal
gas enthalpy behavior, and the fugacity coefficients and their
temperature derivatives.
.. math::
H^{\text{from phi}} = H^{ig} - RT^2\sum_i z_i \frac{\partial \ln
\phi_i}{\partial T}
Returns
-------
error : float
Relative consistency error
:math:`|1 - H^{\text{from phi}}/H^\text{implemented}|`, [-]
'''
return abs(1.0 - self.H_from_phi()/self.H())
def G_dep_phi_consistency(self):
r'''Method to calculate and return a consistency check between
departure Gibbs free energy, and the fugacity coefficients.
.. math::
            G^{\text{from phi}}_{dep} = RT\sum_i z_i \ln \phi_i
Returns
-------
error : float
Relative consistency error
:math:`|1 - G^{\text{from phi}}_{dep}/G^\text{implemented}_{dep}|`, [-]
'''
        # Chapter 2, equation 31, Michelsen
zs, T = self.zs, self.T
        lnphis = self.lnphis()
        G_dep_RT = sum(zs[i]*lnphis[i] for i in range(self.N))
G_dep = G_dep_RT*R*T
return abs(1.0 - G_dep/self.G_dep())
def H_dep_phi_consistency(self):
r'''Method to calculate and return a consistency check between
departure enthalpy, and the fugacity coefficients' temperature
derivatives.
.. math::
H^{\text{from phi}}_{dep} = -RT^2\sum_i z_i \frac{\partial \ln
\phi_i}{\partial T}
Returns
-------
error : float
Relative consistency error
:math:`|1 - H^{\text{from phi}}_{dep}/H^\text{implemented}_{dep}|`, [-]
'''
        dlnphis_dTs = self.dlnphis_dT()
        zs, T = self.zs, self.T
        H_dep_RT2 = sum(zs[i]*dlnphis_dTs[i] for i in range(self.N))
H_dep_recalc = -H_dep_RT2*R*T*T
H_dep = self.H_dep()
return abs(1.0 - H_dep/H_dep_recalc)
def S_dep_phi_consistency(self):
r'''Method to calculate and return a consistency check between ideal
gas entropy behavior, and the fugacity coefficients and their
temperature derivatives.
.. math::
S_{dep}^{\text{from phi}} = - \sum_{i} z_i R\left(\ln \phi_i
+ T \frac{\partial \ln \phi_i}{\partial T}\right)
Returns
-------
error : float
Relative consistency error
:math:`|1 - S^{\text{from phi}}_{dep}/S^\text{implemented}_{dep}|`, [-]
'''
# From coco
lnphis = self.lnphis()
dlnphis_dT = self.dlnphis_dT()
T, zs = self.T, self.zs
S_dep = 0.0
for i in range(self.N):
S_dep -= zs[i]*(R*lnphis[i] + R*T*dlnphis_dT[i])
return abs(1.0 - S_dep/self.S_dep())
def V_phi_consistency(self):
r'''Method to calculate and return a consistency check between
molar volume, and the fugacity coefficients' pressures
derivatives.
.. math::
V^{\text{from phi P der}} = \left(\left(\sum_i z_i \frac{\partial \ln
\phi_i}{\partial P}\right)P + 1\right)RT/P
Returns
-------
error : float
Relative consistency error
:math:`|1 - V^{\text{from phi P der}}/V^\text{implemented}|`, [-]
'''
zs, P = self.zs, self.P
dlnphis_dP = self.dlnphis_dP()
lhs = sum(zs[i]*dlnphis_dP[i] for i in range(self.N))
Z_calc = lhs*P + 1.0
V_calc = Z_calc*self.R*self.T/P
V = self.V()
return abs(1.0 - V_calc/V)
def H_from_phi(self):
r'''Method to calculate and return the enthalpy of the fluid as
calculated from the ideal-gas enthalpy and the the fugacity
coefficients' temperature derivatives.
.. math::
H^{\text{from phi}} = H^{ig} - RT^2\sum_i z_i \frac{\partial \ln
\phi_i}{\partial T}
Returns
-------
H : float
Enthalpy as calculated from fugacity coefficient temperature
derivatives [J/mol]
'''
H0 = self.H_ideal_gas()
dlnphis_dT = self.dlnphis_dT()
T, zs = self.T, self.zs
for i in range(self.N):
H0 -= R*T*T*zs[i]*dlnphis_dT[i]
return H0
def S_from_phi(self):
r'''Method to calculate and return the entropy of the fluid as
calculated from the ideal-gas entropy and the the fugacity
coefficients' temperature derivatives.
.. math::
S = S^{ig} - \sum_{i} z_i R\left(\ln \phi_i + T \frac{\partial \ln
\phi_i}{\partial T}\right)
Returns
-------
S : float
Entropy as calculated from fugacity coefficient temperature
derivatives [J/(mol*K)]
'''
S0 = self.S_ideal_gas()
lnphis = self.lnphis()
dlnphis_dT = self.dlnphis_dT()
T, zs = self.T, self.zs
for i in range(self.N):
S0 -= zs[i]*(R*lnphis[i] + R*T*dlnphis_dT[i])
return S0
def V_from_phi(self):
r'''Method to calculate and return the molar volume of the fluid as
calculated from the pressure derivatives of fugacity coefficients.
.. math::
V^{\text{from phi P der}} = \left(\left(\sum_i z_i \frac{\partial \ln
\phi_i}{\partial P}\right)P + 1\right)RT/P
Returns
-------
V : float
Molar volume, [m^3/mol]
'''
zs, P = self.zs, self.P
dlnphis_dP = self.dlnphis_dP()
obj = sum(zs[i]*dlnphis_dP[i] for i in range(self.N))
Z = P*obj + 1.0
return Z*self.R*self.T/P
def G_min_criteria(self):
r'''Method to calculate and return the Gibbs energy criteria required
for comparing phase stability. This calculation can be faster
than calculating the full Gibbs energy. For this comparison to work,
all phases must use the ideal gas basis.
.. math::
G^{\text{criteria}} = G^{dep} + RT\sum_i z_i \ln z_i
Returns
-------
G_crit : float
Gibbs free energy like criteria [J/mol]
'''
# Definition implemented that does not use the H, or S ideal gas contribution
# Allows for faster checking of which phase is at lowest G, but can only
# be used when all models use an ideal gas basis
zs = self.zs
log_zs = self.log_zs()
G_crit = 0.0
for i in range(self.N):
G_crit += zs[i]*log_zs[i]
G_crit = G_crit*R*self.T + self.G_dep()
return G_crit
def lnphis_at_zs(self, zs):
r'''Method to directly calculate the log fugacity coefficients at a
different composition than the current phase.
This is implemented to allow for the possibility of more direct
calls to obtain fugacities than is possible with the phase interface.
This base method simply creates a new phase, gets its log fugacity
coefficients, and returns them.
Returns
-------
lnphis : list[float]
Log fugacity coefficients, [-]
'''
return self.to_TP_zs(self.T, self.P, zs).lnphis()
def fugacities_at_zs(self, zs):
        r'''Method to directly calculate the fugacities at a
different composition than the current phase.
This is implemented to allow for the possibility of more direct
calls to obtain fugacities than is possible with the phase interface.
This base method simply creates a new phase, gets its log fugacity
coefficients, exponentiates them, and multiplies them by `P` and
compositions.
Returns
-------
fugacities : list[float]
Fugacities, [Pa]
'''
P = self.P
lnphis = self.lnphis_at_zs(zs)
return [P*zs[i]*trunc_exp(lnphis[i]) for i in range(self.N)]
def lnphi(self):
r'''Method to calculate and return the log of fugacity coefficient of
the phase; provided the phase is 1 component.
Returns
-------
lnphi : list[float]
Log fugacity coefficient, [-]
'''
if self.N != 1:
raise ValueError("Property not supported for multicomponent phases")
return self.lnphis()[0]
def phi(self):
r'''Method to calculate and return the fugacity coefficient of
the phase; provided the phase is 1 component.
Returns
-------
phi : list[float]
Fugacity coefficient, [-]
'''
if self.N != 1:
raise ValueError("Property not supported for multicomponent phases")
return self.phis()[0]
def fugacity(self):
r'''Method to calculate and return the fugacity of
the phase; provided the phase is 1 component.
Returns
-------
fugacity : list[float]
Fugacity, [Pa]
'''
if self.N != 1:
raise ValueError("Property not supported for multicomponent phases")
return self.fugacities()[0]
def dfugacity_dT(self):
r'''Method to calculate and return the temperature derivative of
fugacity of the phase; provided the phase is 1 component.
Returns
-------
dfugacity_dT : list[float]
Fugacity first temperature derivative, [Pa/K]
'''
if self.N != 1:
raise ValueError("Property not supported for multicomponent phases")
return self.dfugacities_dT()[0]
def dfugacity_dP(self):
r'''Method to calculate and return the pressure derivative of
fugacity of the phase; provided the phase is 1 component.
Returns
-------
dfugacity_dP : list[float]
Fugacity first pressure derivative, [-]
'''
if self.N != 1:
raise ValueError("Property not supported for multicomponent phases")
return self.dfugacities_dP()[0]
def fugacities(self):
r'''Method to calculate and return the fugacities of the phase.
.. math::
f_i = P z_i \exp(\ln \phi_i)
Returns
-------
fugacities : list[float]
Fugacities, [Pa]
'''
P = self.P
zs = self.zs
lnphis = self.lnphis()
return [P*zs[i]*trunc_exp(lnphis[i]) for i in range(self.N)]
def lnfugacities(self):
r'''Method to calculate and return the log of fugacities of the phase.
.. math::
\ln f_i = \ln\left( P z_i \exp(\ln \phi_i)\right)
= \ln(P) + \ln(z_i) + \ln \phi_i
Returns
-------
lnfugacities : list[float]
Log fugacities, [log(Pa)]
'''
P = self.P
lnphis = self.lnphis()
logP = log(P)
log_zs = self.log_zs()
return [logP + log_zs[i] + lnphis[i] for i in range(self.N)]
fugacities_lowest_Gibbs = fugacities
def dfugacities_dT(self):
r'''Method to calculate and return the temperature derivative of fugacities
of the phase.
.. math::
\frac{\partial f_i}{\partial T} = P z_i \frac{\partial
\ln \phi_i}{\partial T}
Returns
-------
dfugacities_dT : list[float]
Temperature derivative of fugacities of all components
in the phase, [Pa/K]
Notes
-----
'''
dphis_dT = self.dphis_dT()
P, zs = self.P, self.zs
return [P*zs[i]*dphis_dT[i] for i in range(self.N)]
def lnphis_G_min(self):
r'''Method to calculate and return the log fugacity coefficients of the
phase. If the phase can have multiple solutions at its `T` and `P`,
this method should return those with the lowest Gibbs energy. This
needs to be implemented on phases with that criteria like cubic EOSs.
Returns
-------
lnphis : list[float]
Log fugacity coefficients, [-]
'''
return self.lnphis()
def phis(self):
r'''Method to calculate and return the fugacity coefficients of the
phase.
.. math::
\phi_i = \exp (\ln \phi_i)
Returns
-------
phis : list[float]
Fugacity coefficients, [-]
'''
return [trunc_exp(i) for i in self.lnphis()]
def dphis_dT(self):
r'''Method to calculate and return the temperature derivative of fugacity
coefficients of the phase.
.. math::
\frac{\partial \phi_i}{\partial T} = \phi_i \frac{\partial
\ln \phi_i}{\partial T}
Returns
-------
dphis_dT : list[float]
Temperature derivative of fugacity coefficients of all components
in the phase, [1/K]
Notes
-----
'''
try:
return self._dphis_dT
except AttributeError:
pass
try:
dlnphis_dT = self._dlnphis_dT
except AttributeError:
dlnphis_dT = self.dlnphis_dT()
try:
phis = self._phis
except AttributeError:
phis = self.phis()
self._dphis_dT = [dlnphis_dT[i]*phis[i] for i in range(self.N)]
return self._dphis_dT
def dphis_dP(self):
r'''Method to calculate and return the pressure derivative of fugacity
coefficients of the phase.
.. math::
\frac{\partial \phi_i}{\partial P} = \phi_i \frac{\partial
\ln \phi_i}{\partial P}
Returns
-------
dphis_dP : list[float]
Pressure derivative of fugacity coefficients of all components
in the phase, [1/Pa]
Notes
-----
'''
try:
return self._dphis_dP
except AttributeError:
pass
try:
dlnphis_dP = self._dlnphis_dP
except AttributeError:
dlnphis_dP = self.dlnphis_dP()
try:
phis = self._phis
except AttributeError:
phis = self.phis()
self._dphis_dP = [dlnphis_dP[i]*phis[i] for i in range(self.N)]
return self._dphis_dP
def dphis_dzs(self):
r'''Method to calculate and return the molar composition derivative of
fugacity coefficients of the phase.
.. math::
\frac{\partial \phi_i}{\partial z_j} = \phi_i \frac{\partial
\ln \phi_i}{\partial z_j}
Returns
-------
dphis_dzs : list[list[float]]
Molar derivative of fugacity coefficients of all components
in the phase, [-]
Notes
-----
'''
try:
return self._dphis_dzs
except AttributeError:
pass
try:
dlnphis_dzs = self._dlnphis_dzs
except AttributeError:
dlnphis_dzs = self.dlnphis_dzs()
try:
phis = self._phis
except AttributeError:
phis = self.phis()
N = self.N
self._dphis_dzs = [[dlnphis_dzs[i][j]*phis[i] for j in range(N)]
for i in range(N)]
return self._dphis_dzs
def dfugacities_dP(self):
r'''Method to calculate and return the pressure derivative of the
fugacities of the components in the phase.
.. math::
\frac{\partial f_i}{\partial P} = z_i \left(P \frac{\partial
\phi_i}{\partial P} + \phi_i \right)
Returns
-------
dfugacities_dP : list[float]
Pressure derivative of fugacities of all components
in the phase, [-]
Notes
-----
For models without pressure dependence of fugacity, the returned result
may not be exactly zero due to inaccuracy in floating point results;
results are likely on the order of 1e-14 or lower in that case.
'''
try:
dphis_dP = self._dphis_dP
except AttributeError:
dphis_dP = self.dphis_dP()
try:
phis = self._phis
except AttributeError:
phis = self.phis()
P, zs = self.P, self.zs
return [zs[i]*(P*dphis_dP[i] + phis[i]) for i in range(self.N)]
def dfugacities_dns(self):
r'''Method to calculate and return the mole number derivative of the
fugacities of the components in the phase.
if i != j:
.. math::
\frac{\partial f_i}{\partial n_j} = P\phi_i z_i \left(
\frac{\partial \ln \phi_i}{\partial n_j} - 1
\right)
if i == j:
.. math::
\frac{\partial f_i}{\partial n_j} = P\phi_i z_i \left(
\frac{\partial \ln \phi_i}{\partial n_j} - 1
\right) + P\phi_i
Returns
-------
dfugacities_dns : list[list[float]]
Mole number derivatives of the fugacities of all components
in the phase, [Pa/mol]
Notes
-----
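        These expressions follow from differentiating :math:`f_i = P z_i \phi_i`
        with :math:`z_i = n_i/n` evaluated at a total of one mole, where
        :math:`\partial z_i/\partial n_j = \delta_{ij} - z_i`; the
        :math:`\delta_{ij}` term is what produces the extra :math:`P\phi_i` on
        the diagonal.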
'''
phis = self.phis()
dlnphis_dns = self.dlnphis_dns()
        P, zs = self.P, self.zs
matrix = []
cmps = range(self.N)
for i in cmps:
phi_P = P*phis[i]
ziPphi = phi_P*zs[i]
r = dlnphis_dns[i]
row = [ziPphi*(r[j] - 1.0) for j in cmps]
row[i] += phi_P
matrix.append(row)
return matrix
def dlnfugacities_dns(self):
r'''Method to calculate and return the mole number derivative of the
log of fugacities of the components in the phase.
.. math::
\frac{\partial \ln f_i}{\partial n_j} = \frac{1}{f_i}
\frac{\partial f_i}{\partial n_j}
Returns
-------
dlnfugacities_dns : list[list[float]]
Mole number derivatives of the log of fugacities of all components
in the phase, [log(Pa)/mol]
Notes
-----
'''
fugacities = self.fugacities()
dlnfugacities_dns = [list(i) for i in self.dfugacities_dns()]
fugacities_inv = [1.0/fi for fi in fugacities]
cmps = range(self.N)
for i in cmps:
r = dlnfugacities_dns[i]
for j in cmps:
r[j]*= fugacities_inv[i]
return dlnfugacities_dns
def dlnfugacities_dzs(self):
r'''Method to calculate and return the mole fraction derivative of the
log of fugacities of the components in the phase.
.. math::
\frac{\partial \ln f_i}{\partial z_j} = \frac{1}{f_i}
\frac{\partial f_i}{\partial z_j}
Returns
-------
dlnfugacities_dzs : list[list[float]]
Mole fraction derivatives of the log of fugacities of all components
in the phase, [log(Pa)]
Notes
-----
'''
fugacities = self.fugacities()
dlnfugacities_dzs = [list(i) for i in self.dfugacities_dzs()]
fugacities_inv = [1.0/fi for fi in fugacities]
cmps = range(self.N)
for i in cmps:
r = dlnfugacities_dzs[i]
for j in cmps:
r[j]*= fugacities_inv[i]
return dlnfugacities_dzs
def log_zs(self):
r'''Method to calculate and return the log of mole fractions specified.
These are used in calculating entropy and in many other formulas.
.. math::
\ln z_i
Returns
-------
log_zs : list[float]
Log of mole fractions, [-]
Notes
-----
'''
try:
return self._log_zs
except AttributeError:
pass
try:
self._log_zs = [log(zi) for zi in self.zs]
except ValueError:
self._log_zs = _log_zs = []
for zi in self.zs:
try:
_log_zs.append(log(zi))
except ValueError:
_log_zs.append(-690.7755278982137) # log(1e-300)
return self._log_zs
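    # Note the clipping above: a zero mole fraction would make log(zi) raise
    # ValueError, so it is replaced by log(1e-300). A quick illustrative
    # sketch of the effect:
    #
    #     from math import log
    #     zs = [0.7, 0.3, 0.0]
    #     log_zs = [log(zi) if zi > 0.0 else -690.7755278982137 for zi in zs]
    #     # -> [-0.35667..., -1.20397..., -690.7755...]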
def V_iter(self, force=False):
        r'''Method to calculate and return the volume of the phase in a way
        suitable for a TV resolution to converge on the same pressure. This
        often means the return value of this method is an mpmath `mpf`.
        This base-class implementation simply returns the value of the `V`
        method.
Returns
-------
V : float or mpf
Molar volume, [m^3/mol]
Notes
-----
'''
return self.V()
def G(self):
r'''Method to calculate and return the Gibbs free energy of the phase.
.. math::
G = H - TS
Returns
-------
G : float
Gibbs free energy, [J/mol]
Notes
-----
'''
try:
return self._G
except AttributeError:
pass
G = self.H() - self.T*self.S()
self._G = G
return G
G_min = G
def U(self):
r'''Method to calculate and return the internal energy of the phase.
.. math::
U = H - PV
Returns
-------
U : float
Internal energy, [J/mol]
Notes
-----
'''
U = self.H() - self.P*self.V()
return U
def A(self):
r'''Method to calculate and return the Helmholtz energy of the phase.
.. math::
A = U - TS
Returns
-------
A : float
Helmholtz energy, [J/mol]
Notes
-----
'''
A = self.U() - self.T*self.S()
return A
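    # The classical identities G = H - T*S, U = H - P*V and A = U - T*S tie
    # the three methods above together; a hedged consistency sketch for any
    # concrete `phase` object (illustrative only):
    #
    #     from math import isclose
    #     assert isclose(phase.G(), phase.H() - phase.T*phase.S(), rel_tol=1e-12)
    #     assert isclose(phase.U(), phase.H() - phase.P*phase.V(), rel_tol=1e-12)
    #     assert isclose(phase.A(), phase.U() - phase.T*phase.S(), rel_tol=1e-12)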
def dH_dns(self):
r'''Method to calculate and return the mole number derivative of the
enthalpy of the phase.
.. math::
\frac{\partial H}{\partial n_i}
Returns
-------
dH_dns : list[float]
Mole number derivatives of the enthalpy of the phase,
[J/mol^2]
Notes
-----
'''
return dxs_to_dns(self.dH_dzs(), self.zs)
def dS_dns(self):
r'''Method to calculate and return the mole number derivative of the
entropy of the phase.
.. math::
\frac{\partial S}{\partial n_i}
Returns
-------
dS_dns : list[float]
Mole number derivatives of the entropy of the phase,
[J/(mol^2*K)]
Notes
-----
'''
return dxs_to_dns(self.dS_dzs(), self.zs)
def dG_dT(self):
r'''Method to calculate and return the constant-pressure
temperature derivative of Gibbs free energy.
.. math::
\left(\frac{\partial G}{\partial T}\right)_{P}
= -T\left(\frac{\partial S}{\partial T}\right)_{P}
- S + \left(\frac{\partial H}{\partial T}\right)_{P}
Returns
-------
dG_dT : float
Constant-pressure temperature derivative of Gibbs free energy,
[J/(mol*K)]
Notes
-----
'''
return -self.T*self.dS_dT() - self.S() + self.dH_dT()
dG_dT_P = dG_dT
def dG_dT_V(self):
r'''Method to calculate and return the constant-volume
temperature derivative of Gibbs free energy.
.. math::
\left(\frac{\partial G}{\partial T}\right)_{V}
= -T\left(\frac{\partial S}{\partial T}\right)_{V}
- S + \left(\frac{\partial H}{\partial T}\right)_{V}
Returns
-------
dG_dT_V : float
Constant-volume temperature derivative of Gibbs free energy,
[J/(mol*K)]
Notes
-----
'''
return -self.T*self.dS_dT_V() - self.S() + self.dH_dT_V()
def dG_dP(self):
r'''Method to calculate and return the constant-temperature
pressure derivative of Gibbs free energy.
.. math::
\left(\frac{\partial G}{\partial P}\right)_{T}
= -T\left(\frac{\partial S}{\partial P}\right)_{T}
+ \left(\frac{\partial H}{\partial P}\right)_{T}
Returns
-------
dG_dP : float
Constant-temperature pressure derivative of Gibbs free energy,
[J/(mol*Pa)]
Notes
-----
'''
return -self.T*self.dS_dP() + self.dH_dP()
dG_dP_T = dG_dP
def dG_dP_V(self):
r'''Method to calculate and return the constant-volume
pressure derivative of Gibbs free energy.
.. math::
\left(\frac{\partial G}{\partial P}\right)_{V}
= -T\left(\frac{\partial S}{\partial P}\right)_{V}
- S \left(\frac{\partial T}{\partial P}\right)_{V}
+ \left(\frac{\partial H}{\partial P}\right)_{V}
Returns
-------
dG_dP_V : float
Constant-volume pressure derivative of Gibbs free energy,
[J/(mol*Pa)]
Notes
-----
'''
return -self.T*self.dS_dP_V() - self.dT_dP()*self.S() + self.dH_dP_V()
def dG_dV_T(self):
r'''Method to calculate and return the constant-temperature
volume derivative of Gibbs free energy.
.. math::
\left(\frac{\partial G}{\partial V}\right)_{T}
= \left(\frac{\partial G}{\partial P}\right)_{T}
\left(\frac{\partial P}{\partial V}\right)_{T}
Returns
-------
dG_dV_T : float
Constant-temperature volume derivative of Gibbs free energy,
[J/(m^3)]
Notes
-----
'''
return self.dG_dP_T()*self.dP_dV()
def dG_dV_P(self):
r'''Method to calculate and return the constant-pressure
volume derivative of Gibbs free energy.
.. math::
\left(\frac{\partial G}{\partial V}\right)_{P}
= \left(\frac{\partial G}{\partial T}\right)_{P}
\left(\frac{\partial T}{\partial V}\right)_{P}
Returns
-------
dG_dV_P : float
Constant-pressure volume derivative of Gibbs free energy,
[J/(m^3)]
Notes
-----
'''
return self.dG_dT_P()*self.dT_dV()
def dU_dT(self):
r'''Method to calculate and return the constant-pressure
temperature derivative of internal energy.
.. math::
\left(\frac{\partial U}{\partial T}\right)_{P}
= -P \left(\frac{\partial V}{\partial T}\right)_{P}
+ \left(\frac{\partial H}{\partial T}\right)_{P}
Returns
-------
dU_dT : float
Constant-pressure temperature derivative of internal energy,
[J/(mol*K)]
Notes
-----
'''
return -self.P*self.dV_dT() + self.dH_dT()
dU_dT_P = dU_dT
def dU_dT_V(self):
r'''Method to calculate and return the constant-volume
temperature derivative of internal energy.
.. math::
\left(\frac{\partial U}{\partial T}\right)_{V}
= \left(\frac{\partial H}{\partial T}\right)_{V}
- V \left(\frac{\partial P}{\partial T}\right)_{V}
Returns
-------
dU_dT_V : float
Constant-volume temperature derivative of internal energy,
[J/(mol*K)]
Notes
-----
'''
return self.dH_dT_V() - self.V()*self.dP_dT()
def dU_dP(self):
r'''Method to calculate and return the constant-temperature
pressure derivative of internal energy.
.. math::
\left(\frac{\partial U}{\partial P}\right)_{T}
= -P \left(\frac{\partial V}{\partial P}\right)_{T}
- V + \left(\frac{\partial H}{\partial P}\right)_{T}
Returns
-------
dU_dP : float
Constant-temperature pressure derivative of internal energy,
[J/(mol*Pa)]
Notes
-----
'''
return -self.P*self.dV_dP() - self.V() + self.dH_dP()
dU_dP_T = dU_dP
def dU_dP_V(self):
r'''Method to calculate and return the constant-volume
pressure derivative of internal energy.
.. math::
\left(\frac{\partial U}{\partial P}\right)_{V}
= \left(\frac{\partial H}{\partial P}\right)_{V}
- V
Returns
-------
dU_dP_V : float
Constant-volume pressure derivative of internal energy,
[J/(mol*Pa)]
Notes
-----
'''
return self.dH_dP_V() - self.V()
def dU_dV_T(self):
r'''Method to calculate and return the constant-temperature
volume derivative of internal energy.
.. math::
\left(\frac{\partial U}{\partial V}\right)_{T}
= \left(\frac{\partial U}{\partial P}\right)_{T}
\left(\frac{\partial P}{\partial V}\right)_{T}
Returns
-------
dU_dV_T : float
Constant-temperature volume derivative of internal energy,
[J/(m^3)]
Notes
-----
'''
return self.dU_dP_T()*self.dP_dV()
def dU_dV_P(self):
r'''Method to calculate and return the constant-pressure
volume derivative of internal energy.
.. math::
\left(\frac{\partial U}{\partial V}\right)_{P}
= \left(\frac{\partial U}{\partial T}\right)_{P}
\left(\frac{\partial T}{\partial V}\right)_{P}
Returns
-------
dU_dV_P : float
Constant-pressure volume derivative of internal energy,
[J/(m^3)]
Notes
-----
'''
return self.dU_dT_P()*self.dT_dV()
def dA_dT(self):
r'''Method to calculate and return the constant-pressure
temperature derivative of Helmholtz energy.
.. math::
\left(\frac{\partial A}{\partial T}\right)_{P}
= -T \left(\frac{\partial S}{\partial T}\right)_{P}
- S + \left(\frac{\partial U}{\partial T}\right)_{P}
Returns
-------
dA_dT : float
Constant-pressure temperature derivative of Helmholtz energy,
[J/(mol*K)]
Notes
-----
'''
return -self.T*self.dS_dT() - self.S() + self.dU_dT()
dA_dT_P = dA_dT
def dA_dT_V(self):
r'''Method to calculate and return the constant-volume
temperature derivative of Helmholtz energy.
.. math::
\left(\frac{\partial A}{\partial T}\right)_{V}
= \left(\frac{\partial H}{\partial T}\right)_{V}
- V \left(\frac{\partial P}{\partial T}\right)_{V}
- T \left(\frac{\partial S}{\partial T}\right)_{V}
- S
Returns
-------
dA_dT_V : float
Constant-volume temperature derivative of Helmholtz energy,
[J/(mol*K)]
Notes
-----
'''
return (self.dH_dT_V() - self.V()*self.dP_dT() - self.T*self.dS_dT_V()
- self.S())
def dA_dP(self):
r'''Method to calculate and return the constant-temperature
pressure derivative of Helmholtz energy.
.. math::
\left(\frac{\partial A}{\partial P}\right)_{T}
= -T \left(\frac{\partial S}{\partial P}\right)_{T}
+ \left(\frac{\partial U}{\partial P}\right)_{T}
Returns
-------
dA_dP : float
Constant-temperature pressure derivative of Helmholtz energy,
[J/(mol*Pa)]
Notes
-----
'''
return -self.T*self.dS_dP() + self.dU_dP()
dA_dP_T = dA_dP
def dA_dP_V(self):
r'''Method to calculate and return the constant-volume
pressure derivative of Helmholtz energy.
.. math::
\left(\frac{\partial A}{\partial P}\right)_{V}
= \left(\frac{\partial H}{\partial P}\right)_{V}
- V - S\left(\frac{\partial T}{\partial P}\right)_{V}
-T \left(\frac{\partial S}{\partial P}\right)_{V}
Returns
-------
dA_dP_V : float
Constant-volume pressure derivative of Helmholtz energy,
[J/(mol*Pa)]
Notes
-----
'''
return (self.dH_dP_V() - self.V() - self.dT_dP()*self.S()
- self.T*self.dS_dP_V())
def dA_dV_T(self):
r'''Method to calculate and return the constant-temperature
volume derivative of Helmholtz energy.
.. math::
\left(\frac{\partial A}{\partial V}\right)_{T}
= \left(\frac{\partial A}{\partial P}\right)_{T}
\left(\frac{\partial P}{\partial V}\right)_{T}
Returns
-------
dA_dV_T : float
Constant-temperature volume derivative of Helmholtz energy,
[J/(m^3)]
Notes
-----
'''
return self.dA_dP_T()*self.dP_dV()
def dA_dV_P(self):
r'''Method to calculate and return the constant-pressure
volume derivative of Helmholtz energy.
.. math::
\left(\frac{\partial A}{\partial V}\right)_{P}
= \left(\frac{\partial A}{\partial T}\right)_{P}
\left(\frac{\partial T}{\partial V}\right)_{P}
Returns
-------
dA_dV_P : float
Constant-pressure volume derivative of Helmholtz energy,
[J/(m^3)]
Notes
-----
'''
return self.dA_dT_P()*self.dT_dV()
def G_dep(self):
r'''Method to calculate and return the departure Gibbs free energy of
the phase.
.. math::
G_{dep} = H_{dep} - TS_{dep}
Returns
-------
G_dep : float
Departure Gibbs free energy, [J/mol]
Notes
-----
'''
G_dep = self.H_dep() - self.T*self.S_dep()
return G_dep
def V_dep(self):
r'''Method to calculate and return the departure (from ideal gas
behavior) molar volume of the phase.
.. math::
V_{dep} = V - \frac{RT}{P}
Returns
-------
V_dep : float
Departure molar volume, [m^3/mol]
Notes
-----
'''
V_dep = self.V() - self.R*self.T/self.P
return V_dep
def U_dep(self):
r'''Method to calculate and return the departure internal energy of
the phase.
.. math::
U_{dep} = H_{dep} - PV_{dep}
Returns
-------
U_dep : float
Departure internal energy, [J/mol]
Notes
-----
'''
return self.H_dep() - self.P*self.V_dep()
def A_dep(self):
r'''Method to calculate and return the departure Helmholtz energy of
the phase.
.. math::
A_{dep} = U_{dep} - TS_{dep}
Returns
-------
A_dep : float
Departure Helmholtz energy, [J/mol]
Notes
-----
'''
return self.U_dep() - self.T*self.S_dep()
def H_reactive(self):
r'''Method to calculate and return the enthalpy of the phase on a
reactive basis, using the `Hfs` values of the phase.
.. math::
H_{reactive} = H + \sum_i z_i {H_{f,i}}
Returns
-------
H_reactive : float
Enthalpy of the phase on a reactive basis, [J/mol]
Notes
-----
'''
try:
return self._H_reactive
except AttributeError:
pass
H = self.H()
for zi, Hf in zip(self.zs, self.Hfs):
H += zi*Hf
self._H_reactive = H
return H
def S_reactive(self):
r'''Method to calculate and return the entropy of the phase on a
reactive basis, using the `Sfs` values of the phase.
.. math::
S_{reactive} = S + \sum_i z_i {S_{f,i}}
Returns
-------
S_reactive : float
Entropy of the phase on a reactive basis, [J/(mol*K)]
Notes
-----
'''
try:
return self._S_reactive
        except AttributeError:
pass
S = self.S()
for zi, Sf in zip(self.zs, self.Sfs):
S += zi*Sf
self._S_reactive = S
return S
def G_reactive(self):
r'''Method to calculate and return the Gibbs free energy of the phase
on a reactive basis.
.. math::
G_{reactive} = H_{reactive} - TS_{reactive}
Returns
-------
G_reactive : float
Gibbs free energy of the phase on a reactive basis, [J/(mol)]
Notes
-----
'''
G = self.H_reactive() - self.T*self.S_reactive()
return G
def U_reactive(self):
r'''Method to calculate and return the internal energy of the phase
on a reactive basis.
.. math::
U_{reactive} = H_{reactive} - PV
Returns
-------
U_reactive : float
Internal energy of the phase on a reactive basis, [J/(mol)]
Notes
-----
'''
U = self.H_reactive() - self.P*self.V()
return U
def A_reactive(self):
r'''Method to calculate and return the Helmholtz free energy of the
phase on a reactive basis.
.. math::
A_{reactive} = U_{reactive} - TS_{reactive}
Returns
-------
A_reactive : float
Helmholtz free energy of the phase on a reactive basis, [J/(mol)]
Notes
-----
'''
A = self.U_reactive() - self.T*self.S_reactive()
return A
def H_formation_ideal_gas(self):
r'''Method to calculate and return the ideal-gas enthalpy of formation
of the phase (as if the phase was an ideal gas).
.. math::
H_{reactive}^{ig} = \sum_i z_i {H_{f,i}}
Returns
-------
H_formation_ideal_gas : float
Enthalpy of formation of the phase on a reactive basis
as an ideal gas, [J/mol]
Notes
-----
'''
try:
return self._H_formation_ideal_gas
except AttributeError:
pass
Hf_ideal_gas = 0.0
for zi, Hf in zip(self.zs, self.Hfs):
Hf_ideal_gas += zi*Hf
self._H_formation_ideal_gas = Hf_ideal_gas
return Hf_ideal_gas
def S_formation_ideal_gas(self):
r'''Method to calculate and return the ideal-gas entropy of formation
of the phase (as if the phase was an ideal gas).
.. math::
S_{reactive}^{ig} = \sum_i z_i {S_{f,i}}
Returns
-------
S_formation_ideal_gas : float
Entropy of formation of the phase on a reactive basis
as an ideal gas, [J/(mol*K)]
Notes
-----
'''
try:
return self._S_formation_ideal_gas
        except AttributeError:
pass
Sf_ideal_gas = 0.0
for zi, Sf in zip(self.zs, self.Sfs):
Sf_ideal_gas += zi*Sf
self._S_formation_ideal_gas = Sf_ideal_gas
return Sf_ideal_gas
def G_formation_ideal_gas(self):
r'''Method to calculate and return the ideal-gas Gibbs free energy of
formation of the phase (as if the phase was an ideal gas).
.. math::
G_{reactive}^{ig} = H_{reactive}^{ig} - T_{ref}^{ig}
S_{reactive}^{ig}
Returns
-------
G_formation_ideal_gas : float
Gibbs free energy of formation of the phase on a reactive basis
as an ideal gas, [J/(mol)]
Notes
-----
'''
Gf = self.H_formation_ideal_gas() - self.T_REF_IG*self.S_formation_ideal_gas()
return Gf
def U_formation_ideal_gas(self):
r'''Method to calculate and return the ideal-gas internal energy of
formation of the phase (as if the phase was an ideal gas).
.. math::
U_{reactive}^{ig} = H_{reactive}^{ig} - P_{ref}^{ig}
V^{ig}
Returns
-------
U_formation_ideal_gas : float
Internal energy of formation of the phase on a reactive basis
as an ideal gas, [J/(mol)]
Notes
-----
'''
Uf = self.H_formation_ideal_gas() - self.P_REF_IG*self.V_ideal_gas()
return Uf
def A_formation_ideal_gas(self):
r'''Method to calculate and return the ideal-gas Helmholtz energy of
formation of the phase (as if the phase was an ideal gas).
.. math::
A_{reactive}^{ig} = U_{reactive}^{ig} - T_{ref}^{ig}
S_{reactive}^{ig}
Returns
-------
A_formation_ideal_gas : float
Helmholtz energy of formation of the phase on a reactive basis
as an ideal gas, [J/(mol)]
Notes
-----
'''
Af = self.U_formation_ideal_gas() - self.T_REF_IG*self.S_formation_ideal_gas()
return Af
def Cv(self):
r'''Method to calculate and return the constant-volume heat
capacity `Cv` of the phase.
.. math::
            C_v = T\left(\frac{\partial P}{\partial T}\right)_V^2/
            \left(\frac{\partial P}{\partial V}\right)_T + C_p
Returns
-------
Cv : float
Constant volume molar heat capacity, [J/(mol*K)]
Notes
-----
'''
try:
return self._Cv
except AttributeError:
pass
# checks out
Cp_m_Cv = Cp_minus_Cv(self.T, self.dP_dT(), self.dP_dV())
Cp = self.Cp()
self._Cv = Cv = Cp - Cp_m_Cv
return Cv
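    # Cv is obtained from Cp through the identity
    # Cp - Cv = -T*(dP/dT)_V^2/(dP/dV)_T, which needs only the two pressure
    # derivatives. A sketch of the same computation without the helper
    # function (illustrative; `phase` is an assumption):
    #
    #     T = phase.T
    #     dP_dT, dP_dV = phase.dP_dT(), phase.dP_dV()
    #     Cv = phase.Cp() + T*dP_dT*dP_dT/dP_dV  # dP_dV < 0, so Cv < Cp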
def dCv_dT_P(self):
r'''Method to calculate the temperature derivative of Cv, constant
volume heat capacity, at constant pressure.
.. math::
\left(\frac{\partial C_v}{\partial T}\right)_P =
- \frac{T \operatorname{dPdT_{V}}^{2}{\left(T \right)} \frac{d}{dT}
\operatorname{dPdV_{T}}{\left(T \right)}}{\operatorname{dPdV_{T}}^{2}
{\left(T \right)}} + \frac{2 T \operatorname{dPdT_{V}}{\left(T \right)}
\frac{d}{d T} \operatorname{dPdT_{V}}{\left(T \right)}}
{\operatorname{dPdV_{T}}{\left(T \right)}} + \frac{\operatorname{
dPdT_{V}}^{2}{\left(T \right)}}{\operatorname{dPdV_{T}}{\left(T
\right)}} + \frac{d}{d T} \operatorname{Cp}{\left(T \right)}
Returns
-------
dCv_dT_P : float
Temperature derivative of constant volume heat capacity at constant
pressure, [J/mol/K^2]
Notes
-----
Requires `d2P_dT2_PV`, `d2P_dVdT_TP`, and `d2H_dT2`.
'''
T = self.T
x0 = self.dP_dT_V()
x1 = x0*x0
x2 = self.dP_dV_T()
x3 = 1.0/x2
x50 = self.d2P_dT2_PV()
x51 = self.d2P_dVdT_TP()
x52 = self.d2H_dT2()
return 2.0*T*x0*x3*x50 - T*x1*x51*x3*x3 + x1*x3 + x52
def dCv_dP_T(self):
r'''Method to calculate the pressure derivative of Cv, constant
volume heat capacity, at constant temperature.
.. math::
\left(\frac{\partial C_v}{\partial P}\right)_T =
- T \operatorname{dPdT_{V}}{\left(P \right)} \frac{d}{d P}
\operatorname{dVdT_{P}}{\left(P \right)} - T \operatorname{
dVdT_{P}}{\left(P \right)} \frac{d}{d P} \operatorname{dPdT_{V}}
{\left(P \right)} + \frac{d}{d P} \operatorname{Cp}{\left(P\right)}
Returns
-------
dCv_dP_T : float
Pressure derivative of constant volume heat capacity at constant
temperature, [J/mol/K/Pa]
Notes
-----
Requires `d2V_dTdP`, `d2P_dTdP`, and `d2H_dTdP`.
'''
T = self.T
dP_dT_V = self.dP_dT_V()
d2V_dTdP = self.d2V_dTdP()
dV_dT_P = self.dV_dT_P()
d2P_dTdP = self.d2P_dTdP()
d2H_dep_dTdP = self.d2H_dTdP()
return -T*dP_dT_V*d2V_dTdP - T*dV_dT_P*d2P_dTdP + d2H_dep_dTdP
def chemical_potential(self):
        r'''Method to calculate and return the chemical potentials of each
        component in the phase, [J/mol]. For a pure substance, this is the
        molar Gibbs energy on a reactive basis.
        .. math::
            \mu_i = \left(\frac{\partial G}{\partial n_i}\right)_{T, P, n_{j \ne i}}
Returns
-------
chemical_potential : list[float]
Chemical potentials, [J/mol]
'''
try:
return self._chemical_potentials
except AttributeError:
pass
dS_dzs = self.dS_dzs()
dH_dzs = self.dH_dzs()
T, Hfs, Sfs = self.T, self.Hfs, self.Sfs
dG_reactive_dzs = [Hfs[i] - T*(Sfs[i] + dS_dzs[i]) + dH_dzs[i] for i in range(self.N)]
dG_reactive_dns = dxs_to_dns(dG_reactive_dzs, self.zs)
chemical_potentials = dns_to_dn_partials(dG_reactive_dns, self.G_reactive())
self._chemical_potentials = chemical_potentials
return chemical_potentials
# # CORRECT DO NOT CHANGE
# # TODO analytical implementation
# def to_diff(ns):
# tot = sum(ns)
# zs = normalize(ns)
# return tot*self.to_TP_zs(self.T, self.P, zs).G_reactive()
# return jacobian(to_diff, self.zs)
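    # The commented-out numerical route above can be spelled out as a
    # standalone forward-difference check; a sketch (names are illustrative,
    # not library API):
    #
    #     def chemical_potential_numerical(phase, dn=1e-7):
    #         def nG(ns):
    #             tot = sum(ns)
    #             zs = [ni/tot for ni in ns]
    #             return tot*phase.to_TP_zs(phase.T, phase.P, zs).G_reactive()
    #         base = nG(phase.zs)
    #         mus = []
    #         for i in range(phase.N):
    #             ns = list(phase.zs)
    #             ns[i] += dn
    #             mus.append((nG(ns) - base)/dn)
    #         return mus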
def activities(self):
r'''Method to calculate and return the activities of each component
in the phase [-].
.. math::
a_i(T, P, x; f_i^0) = \frac{f_i(T, P, x)}{f_i^0(T, P_i^0)}
Returns
-------
activities : list[float]
Activities, [-]
'''
# For a good discussion, see
# Thermodynamics: Fundamentals for Applications, <NAME>, <NAME>
# 5.4 DEVIATIONS FROM IDEAL SOLUTIONS: RATIO MEASURES page 201
# CORRECT DO NOT CHANGE
fugacities = self.fugacities()
fugacities_std = self.fugacities_std() # TODO implement fugacities_std
return [fugacities[i]/fugacities_std[i] for i in range(self.N)]
def gammas(self):
r'''Method to calculate and return the activity coefficients of the
phase, [-].
Activity coefficients are defined as the ratio of
the actual fugacity coefficients times the pressure to the reference
pure fugacity coefficients times the reference pressure.
The reference pressure can be set to the actual pressure (the Lewis
Randall standard state) which makes the pressures cancel.
.. math::
\gamma_i(T, P, x; f_i^0(T, P_i^0)) =
\frac{\phi_i(T, P, x)P}{\phi_i^0(T, P_i^0) P_i^0}
Returns
-------
gammas : list[float]
Activity coefficients, [-]
'''
# For a good discussion, see
# Thermodynamics: Fundamentals for Applications, <NAME>, <NAME>
# 5.5 ACTIVITY COEFFICIENTS FROM FUGACITY COEFFICIENTS
        # There is no single definition of gamma, but this is believed to be
        # the most generally used one for EOSs; activity coefficient models
        # override this method
phis = self.phis()
gammas = []
T, P, N = self.T, self.P, self.N
for i in range(N):
zeros = [0.0]*N
zeros[i] = 1.0
phi = self.to_TP_zs(T=T, P=P, zs=zeros).phis()[i]
gammas.append(phis[i]/phi)
return gammas
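    # The loop above evaluates each pure-component fugacity coefficient at
    # the mixture T and P (the Lewis-Randall standard state) by building a
    # composition of zeros with a 1.0 at component i; gamma_i is then
    # phi_i(mixture)/phi_i(pure). A sketch for a binary (illustrative only):
    #
    #     phi_mix = phase.phis()
    #     phi_1 = phase.to_TP_zs(T=phase.T, P=phase.P, zs=[1.0, 0.0]).phis()[0]
    #     phi_2 = phase.to_TP_zs(T=phase.T, P=phase.P, zs=[0.0, 1.0]).phis()[1]
    #     gammas = [phi_mix[0]/phi_1, phi_mix[1]/phi_2]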
def Cp_Cv_ratio(self):
r'''Method to calculate and return the Cp/Cv ratio of the phase.
.. math::
\frac{C_p}{C_v}
Returns
-------
Cp_Cv_ratio : float
Cp/Cv ratio, [-]
Notes
-----
'''
return self.Cp()/self.Cv()
isentropic_exponent = Cp_Cv_ratio
def Z(self):
r'''Method to calculate and return the compressibility factor of the
phase.
.. math::
Z = \frac{PV}{RT}
Returns
-------
Z : float
Compressibility factor, [-]
Notes
-----
'''
return self.P*self.V()/(self.R*self.T)
def rho(self):
r'''Method to calculate and return the molar density of the
phase.
.. math::
            \rho = \frac{1}{V}
Returns
-------
rho : float
Molar density, [mol/m^3]
Notes
-----
'''
return 1.0/self.V()
def dT_dP(self):
r'''Method to calculate and return the constant-volume pressure
derivative of temperature of the phase.
.. math::
\left(\frac{\partial T}{\partial P}\right)_V = \frac{1}{\left(\frac{
\partial P}{\partial T}\right)_V}
Returns
-------
dT_dP : float
Constant-volume pressure derivative of temperature, [K/Pa]
Notes
-----
'''
return 1.0/self.dP_dT()
def dV_dT(self):
r'''Method to calculate and return the constant-pressure temperature
derivative of volume of the phase.
.. math::
\left(\frac{\partial V}{\partial T}\right)_P =
\frac{-\left(\frac{\partial P}{\partial T}\right)_V}
{\left(\frac{\partial P}{\partial V}\right)_T}
Returns
-------
dV_dT : float
Constant-pressure temperature derivative of volume, [m^3/(mol*K)]
Notes
-----
'''
try:
return self._dV_dT
except AttributeError:
pass
dV_dT = self._dV_dT = -self.dP_dT()/self.dP_dV()
return dV_dT
def dV_dP(self):
r'''Method to calculate and return the constant-temperature pressure
derivative of volume of the phase.
.. math::
            \left(\frac{\partial V}{\partial P}\right)_T =
            -\left(\frac{\partial V}{\partial T}\right)_P
            \left(\frac{\partial T}{\partial P}\right)_V
Returns
-------
dV_dP : float
Constant-temperature pressure derivative of volume, [m^3/(mol*Pa)]
Notes
-----
'''
return -self.dV_dT()*self.dT_dP()
def dT_dV(self):
r'''Method to calculate and return the constant-pressure volume
derivative of temperature of the phase.
.. math::
\left(\frac{\partial T}{\partial V}\right)_P =
\frac{1}
{\left(\frac{\partial V}{\partial T}\right)_P}
Returns
-------
        dT_dV : float
            Constant-pressure volume derivative of temperature, [K*mol/m^3]
Notes
-----
'''
return 1./self.dV_dT()
def d2V_dP2(self):
        r'''Method to calculate and return the constant-temperature second
        pressure derivative of volume of the phase.
.. math::
\left(\frac{\partial^2 V}{\partial P^2}\right)_T =
-\frac{\left(\frac{\partial^2 P}{\partial V^2}\right)_T}
{\left(\frac{\partial P}{\partial V}\right)_T^3}
Returns
-------
        d2V_dP2 : float
            Constant-temperature second pressure derivative of volume,
            [m^3/(mol*Pa^2)]
Notes
-----
'''
inverse_dP_dV = 1.0/self.dP_dV()
inverse_dP_dV3 = inverse_dP_dV*inverse_dP_dV*inverse_dP_dV
return -self.d2P_dV2()*inverse_dP_dV3
def d2T_dP2(self):
r'''Method to calculate and return the constant-volume second pressure
derivative of temperature of the phase.
.. math::
\left(\frac{\partial^2 T}{\partial P^2}\right)_V =
-\left(\frac{\partial^2 P}{\partial T^2}\right)_V
\left(\frac{\partial T}{\partial P}\right)_V^3
Returns
-------
d2T_dP2 : float
Constant-volume second pressure derivative of temperature, [K/Pa^2]
Notes
-----
'''
dT_dP = self.dT_dP()
inverse_dP_dT2 = dT_dP*dT_dP
inverse_dP_dT3 = inverse_dP_dT2*dT_dP
return -self.d2P_dT2()*inverse_dP_dT3
def d2T_dV2(self):
r'''Method to calculate and return the constant-pressure second volume
derivative of temperature of the phase.
.. math::
\left(\frac{\partial^2 T}{\partial V^2}\right)_P = -\left[
\left(\frac{\partial^2 P}{\partial V^2}\right)_T
\left(\frac{\partial P}{\partial T}\right)_V
- \left(\frac{\partial P}{\partial V}\right)_T
\left(\frac{\partial^2 P}{\partial T \partial V}\right) \right]
\left(\frac{\partial P}{\partial T}\right)^{-2}_V
+ \left[\left(\frac{\partial^2 P}{\partial T\partial V}\right)
\left(\frac{\partial P}{\partial T}\right)_V
- \left(\frac{\partial P}{\partial V}\right)_T
\left(\frac{\partial^2 P}{\partial T^2}\right)_V\right]
\left(\frac{\partial P}{\partial T}\right)_V^{-3}
\left(\frac{\partial P}{\partial V}\right)_T
Returns
-------
d2T_dV2 : float
Constant-pressure second volume derivative of temperature,
[K*mol^2/m^6]
Notes
-----
'''
dP_dT = self.dP_dT()
dP_dV = self.dP_dV()
d2P_dTdV = self.d2P_dTdV()
d2P_dT2 = self.d2P_dT2()
dT_dP = self.dT_dP()
inverse_dP_dT2 = dT_dP*dT_dP
inverse_dP_dT3 = inverse_dP_dT2*dT_dP
return (-(self.d2P_dV2()*dP_dT - dP_dV*d2P_dTdV)*inverse_dP_dT2
+(d2P_dTdV*dP_dT - dP_dV*d2P_dT2)*inverse_dP_dT3*dP_dV)
def d2V_dT2(self):
r'''Method to calculate and return the constant-pressure second
temperature derivative of volume of the phase.
.. math::
\left(\frac{\partial^2 V}{\partial T^2}\right)_P = -\left[
\left(\frac{\partial^2 P}{\partial T^2}\right)_V
\left(\frac{\partial P}{\partial V}\right)_T
- \left(\frac{\partial P}{\partial T}\right)_V
\left(\frac{\partial^2 P}{\partial T \partial V}\right) \right]
\left(\frac{\partial P}{\partial V}\right)^{-2}_T
+ \left[\left(\frac{\partial^2 P}{\partial T\partial V}\right)
\left(\frac{\partial P}{\partial V}\right)_T
- \left(\frac{\partial P}{\partial T}\right)_V
\left(\frac{\partial^2 P}{\partial V^2}\right)_T\right]
\left(\frac{\partial P}{\partial V}\right)_T^{-3}
\left(\frac{\partial P}{\partial T}\right)_V
Returns
-------
d2V_dT2 : float
Constant-pressure second temperature derivative of volume,
[m^3/(mol*K^2)]
Notes
-----
'''
dP_dT = self.dP_dT()
dP_dV = self.dP_dV()
d2P_dTdV = self.d2P_dTdV()
d2P_dT2 = self.d2P_dT2()
d2P_dV2 = self.d2P_dV2()
inverse_dP_dV = 1.0/dP_dV
inverse_dP_dV2 = inverse_dP_dV*inverse_dP_dV
inverse_dP_dV3 = inverse_dP_dV*inverse_dP_dV2
return (-(d2P_dT2*dP_dV - dP_dT*d2P_dTdV)*inverse_dP_dV2
+(d2P_dTdV*dP_dV - dP_dT*d2P_dV2)*inverse_dP_dV3*dP_dT)
def d2V_dPdT(self):
r'''Method to calculate and return the derivative of pressure and then
the derivative of temperature of volume of the phase.
.. math::
\left(\frac{\partial^2 V}{\partial T\partial P}\right) =
- \left[\left(\frac{\partial^2 P}{\partial T \partial V}\right)
\left(\frac{\partial P}{\partial V}\right)_T
- \left(\frac{\partial P}{\partial T}\right)_V
\left(\frac{\partial^2 P}{\partial V^2}\right)_T
\right]\left(\frac{\partial P}{\partial V}\right)_T^{-3}
Returns
-------
d2V_dPdT : float
Derivative of pressure and then the derivative of temperature
of volume, [m^3/(mol*K*Pa)]
Notes
-----
'''
dP_dT = self.dP_dT()
dP_dV = self.dP_dV()
d2P_dTdV = self.d2P_dTdV()
d2P_dV2 = self.d2P_dV2()
inverse_dP_dV = 1.0/dP_dV
inverse_dP_dV2 = inverse_dP_dV*inverse_dP_dV
inverse_dP_dV3 = inverse_dP_dV*inverse_dP_dV2
return -(d2P_dTdV*dP_dV - dP_dT*d2P_dV2)*inverse_dP_dV3
d2V_dTdP = d2V_dPdT
def d2T_dPdV(self):
r'''Method to calculate and return the derivative of pressure and then
the derivative of volume of temperature of the phase.
.. math::
\left(\frac{\partial^2 T}{\partial P\partial V}\right) =
- \left[\left(\frac{\partial^2 P}{\partial T \partial V}\right)
\left(\frac{\partial P}{\partial T}\right)_V
- \left(\frac{\partial P}{\partial V}\right)_T
\left(\frac{\partial^2 P}{\partial T^2}\right)_V
\right]\left(\frac{\partial P}{\partial T}\right)_V^{-3}
Returns
-------
d2T_dPdV : float
Derivative of pressure and then the derivative of volume
of temperature, [K*mol/(Pa*m^3)]
Notes
-----
'''
dT_dP = self.dT_dP()
inverse_dP_dT2 = dT_dP*dT_dP
inverse_dP_dT3 = inverse_dP_dT2*dT_dP
d2P_dTdV = self.d2P_dTdV()
dP_dT = self.dP_dT()
dP_dV = self.dP_dV()
d2P_dT2 = self.d2P_dT2()
return -(d2P_dTdV*dP_dT - dP_dV*d2P_dT2)*inverse_dP_dT3
d2T_dVdP = d2T_dPdV
def d2P_dVdT(self):
r'''Method to calculate and return the second derivative of
pressure with respect to temperature and volume of the phase.
This is an alias of `d2P_dTdV`.
.. math::
\frac{\partial^2 P}{\partial V \partial T}
Returns
-------
        d2P_dVdT : float
            Second derivative of pressure with respect to volume and
            temperature, [mol*Pa^2/(J*K)]
'''
return self.d2P_dTdV()
def dZ_dzs(self):
r'''Method to calculate and return the mole fraction derivatives of the
compressibility factor `Z` of the phase.
.. math::
\frac{\partial Z}{\partial z_i}
Returns
-------
dZ_dzs : list[float]
Mole fraction derivatives of the compressibility factor of the
phase, [-]
Notes
-----
'''
factor = self.P/(self.T*self.R)
return [dV*factor for dV in self.dV_dzs()]
def dZ_dns(self):
r'''Method to calculate and return the mole number derivatives of the
compressibility factor `Z` of the phase.
.. math::
\frac{\partial Z}{\partial n_i}
Returns
-------
dZ_dns : list[float]
Mole number derivatives of the compressibility factor of the
phase, [1/mol]
Notes
-----
'''
return dxs_to_dns(self.dZ_dzs(), self.zs)
def dV_dns(self):
r'''Method to calculate and return the mole number derivatives of the
molar volume `V` of the phase.
.. math::
\frac{\partial V}{\partial n_i}
Returns
-------
dV_dns : list[float]
Mole number derivatives of the molar volume of the phase, [m^3]
Notes
-----
'''
return dxs_to_dns(self.dV_dzs(), self.zs)
# Derived properties
def PIP(self):
r'''Method to calculate and return the phase identification parameter
of the phase.
.. math::
\Pi = V \left[\frac{\frac{\partial^2 P}{\partial V \partial T}}
{\frac{\partial P }{\partial T}}- \frac{\frac{\partial^2 P}{\partial
V^2}}{\frac{\partial P}{\partial V}} \right]
Returns
-------
PIP : float
Phase identification parameter, [-]
'''
return phase_identification_parameter(self.V(), self.dP_dT(), self.dP_dV(),
self.d2P_dV2(), self.d2P_dTdV())
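    # As a rule of thumb from the literature (Venkatarathnam and Oellrich),
    # PIP > 1 indicates liquid-like behavior and PIP < 1 gas-like behavior;
    # a sketch of the common usage (illustrative only):
    #
    #     is_liquid = phase.PIP() > 1.0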
def kappa(self):
r'''Method to calculate and return the isothermal compressibility
of the phase.
.. math::
\kappa = -\frac{1}{V}\left(\frac{\partial V}{\partial P} \right)_T
Returns
-------
kappa : float
Isothermal coefficient of compressibility, [1/Pa]
'''
return -1.0/self.V()*self.dV_dP()
isothermal_compressibility = kappa
def dkappa_dT(self):
r'''Method to calculate and return the temperature derivative of
isothermal compressibility of the phase.
.. math::
\frac{\partial \kappa}{\partial T} = -\frac{ \left(\frac{\partial^2 V}{\partial P\partial T} \right)}{V}
+ \frac{\left(\frac{\partial V}{\partial P} \right)_T\left(\frac{\partial V}{\partial T} \right)_P}{V^2}
Returns
-------
dkappa_dT : float
First temperature derivative of isothermal coefficient of
compressibility, [1/(Pa*K)]
'''
V, dV_dP, dV_dT, d2V_dTdP = self.V(), self.dV_dP(), self.dV_dT(), self.d2V_dTdP()
return -d2V_dTdP/V + dV_dP*dV_dT/(V*V)
disothermal_compressibility_dT = dkappa_dT
def isothermal_bulk_modulus(self):
r'''Method to calculate and return the isothermal bulk modulus
of the phase.
.. math::
K_T = -V\left(\frac{\partial P}{\partial V} \right)_T
Returns
-------
isothermal_bulk_modulus : float
Isothermal bulk modulus, [Pa]
'''
return 1.0/self.kappa()
def isobaric_expansion(self):
        r'''Method to calculate and return the isobaric expansion coefficient
of the phase.
.. math::
\beta = \frac{1}{V}\left(\frac{\partial V}{\partial T} \right)_P
Returns
-------
beta : float
            Isobaric coefficient of thermal expansion, [1/K]
'''
return self.dV_dT()/self.V()
def disobaric_expansion_dT(self):
r'''Method to calculate and return the temperature derivative of
        isobaric expansion coefficient of the phase.
.. math::
\frac{\partial \beta}{\partial T} = \frac{1}{V}\left(
\left(\frac{\partial^2 V}{\partial T^2} \right)_P
- \left(\frac{\partial V}{\partial T} \right)_P^2/V
\right)
Returns
-------
dbeta_dT : float
            Temperature derivative of isobaric coefficient of thermal
            expansion, [1/K^2]
'''
'''
from sympy import *
T, P = symbols('T, P')
V = symbols('V', cls=Function)
expr = 1/V(T, P)*Derivative(V(T, P), T)
diff(expr, T)
Derivative(V(T, P), (T, 2))/V(T, P) - Derivative(V(T, P), T)**2/V(T, P)**2
# Untested
'''
V_inv = 1.0/self.V()
dV_dT = self.dV_dT()
return V_inv*(self.d2V_dT2() - dV_dT*dV_dT*V_inv)
def disobaric_expansion_dP(self):
r'''Method to calculate and return the pressure derivative of
        isobaric expansion coefficient of the phase.
.. math::
\frac{\partial \beta}{\partial P} = \frac{1}{V}\left(
\left(\frac{\partial^2 V}{\partial T\partial P} \right)
-\frac{ \left(\frac{\partial V}{\partial T} \right)_P
\left(\frac{\partial V}{\partial P} \right)_T}{V}
\right)
Returns
-------
dbeta_dP : float
            Pressure derivative of isobaric coefficient of thermal
            expansion, [1/(K*Pa)]
'''
'''
from sympy import *
T, P = symbols('T, P')
V = symbols('V', cls=Function)
expr = 1/V(T, P)*Derivative(V(T, P), T)
diff(expr, P)
Derivative(V(T, P), P, T)/V(T, P) - Derivative(V(T, P), P)*Derivative(V(T, P), T)/V(T, P)**2
'''
V_inv = 1.0/self.V()
dV_dT = self.dV_dT()
dV_dP = self.dV_dP()
return V_inv*(self.d2V_dTdP() - dV_dT*dV_dP*V_inv)
def Joule_Thomson(self):
r'''Method to calculate and return the Joule-Thomson coefficient
of the phase.
.. math::
\mu_{JT} = \left(\frac{\partial T}{\partial P}\right)_H = \frac{1}{C_p}
\left[T \left(\frac{\partial V}{\partial T}\right)_P - V\right]
= \frac{V}{C_p}\left(\beta T-1\right)
Returns
-------
mu_JT : float
Joule-Thomson coefficient [K/Pa]
'''
return Joule_Thomson(self.T, self.V(), self.Cp(), dV_dT=self.dV_dT(), beta=self.isobaric_expansion())
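    # The equivalent closed form mu_JT = V*(beta*T - 1)/Cp can be evaluated
    # directly from methods already on the phase; a sketch (illustrative
    # only):
    #
    #     mu_JT = phase.V()*(phase.isobaric_expansion()*phase.T - 1.0)/phase.Cp()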
def speed_of_sound(self):
r'''Method to calculate and return the molar speed of sound
of the phase.
.. math::
w = \left[-V^2 \left(\frac{\partial P}{\partial V}\right)_T \frac{C_p}
{C_v}\right]^{1/2}
A similar expression based on molar density is:
.. math::
w = \left[\left(\frac{\partial P}{\partial \rho}\right)_T \frac{C_p}
{C_v}\right]^{1/2}
Returns
-------
w : float
Speed of sound for a real gas, [m*kg^0.5/(s*mol^0.5)]
'''
# Intentionally molar
return speed_of_sound(self.V(), self.dP_dV(), self.Cp(), self.Cv())
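    # The value above is the *molar* speed of sound; dividing by the square
    # root of the molecular weight in kg/mol recovers the conventional m/s
    # value. A sketch, where `MW_g_mol` (mixture molecular weight in g/mol)
    # is an assumption for illustration:
    #
    #     from math import sqrt
    #     w_mass = phase.speed_of_sound()/sqrt(MW_g_mol*1e-3)  # [m/s]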
### Compressibility factor derivatives
def dZ_dT(self):
r'''Method to calculate and return the temperature derivative of
compressibility of the phase.
.. math::
            \left(\frac{\partial Z}{\partial T}\right)_P = \frac{P}{RT}\left[
            \left(\frac{\partial V}{\partial T}\right)_P - \frac{V}{T}\right]
Returns
-------
dZ_dT : float
Temperature derivative of compressibility, [1/K]
'''
T_inv = 1.0/self.T
return self.P*self.R_inv*T_inv*(self.dV_dT() - self.V()*T_inv)
def dZ_dP(self):
r'''Method to calculate and return the pressure derivative of
compressibility of the phase.
.. math::
\frac{\partial Z}{\partial P} = \frac{V + P\left(\frac{\partial
V}{\partial P}\right)_T}{RT}
Returns
-------
dZ_dP : float
Pressure derivative of compressibility, [1/Pa]
'''
return 1.0/(self.T*self.R)*(self.V() + self.P*self.dV_dP())
def dZ_dV(self):
r'''Method to calculate and return the volume derivative of
compressibility of the phase.
.. math::
\frac{\partial Z}{\partial V} = \frac{P - \rho \left(\frac{\partial
P}{\partial \rho}\right)_T}{RT}
Returns
-------
dZ_dV : float
Volume derivative of compressibility, [mol/(m^3)]
'''
return (self.P - self.rho()*self.dP_drho())/(self.R*self.T)
# Could add more
### Derivatives in the molar density basis
def dP_drho(self):
r'''Method to calculate and return the molar density derivative of
pressure of the phase.
.. math::
\frac{\partial P}{\partial \rho} = -V^2\left(\frac{\partial
P}{\partial V}\right)_T
Returns
-------
dP_drho : float
Molar density derivative of pressure, [Pa*m^3/mol]
'''
V = self.V()
return -V*V*self.dP_dV()
def drho_dP(self):
r'''Method to calculate and return the pressure derivative of
molar density of the phase.
.. math::
\frac{\partial \rho}{\partial P} = -\frac{1}{V^2}\left(\frac{\partial
V}{\partial P}\right)_T
Returns
-------
drho_dP : float
            Pressure derivative of molar density, [mol/(Pa*m^3)]
'''
V = self.V()
return -self.dV_dP()/(V*V)
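    # dP_drho and drho_dP are exact reciprocals by the inverse function rule;
    # a quick consistency sketch (illustrative only):
    #
    #     assert abs(phase.dP_drho()*phase.drho_dP() - 1.0) < 1e-9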
def d2P_drho2(self):
r'''Method to calculate and return the second molar density derivative
of pressure of the phase.
.. math::
\frac{\partial^2 P}{\partial \rho^2} = -V^2\left(
-V^2 \left(\frac{\partial^2 P}{\partial V^2}\right)_T
-2 V \left(\frac{\partial P}{\partial V}\right)_T
\right)
Returns
-------
d2P_drho2 : float
Second molar density derivative of pressure, [Pa*m^6/mol^2]
'''
V = self.V()
return V*V*V*(V*self.d2P_dV2() + 2.0*self.dP_dV())
def d2rho_dP2(self):
r'''Method to calculate and return the second pressure derivative
of molar density of the phase.
.. math::
\frac{\partial^2 \rho}{\partial P^2} = -\frac{1}{V^2}
\left(\frac{\partial^2 V}{\partial P^2}\right)_T
+ \frac{2}{V^3} \left(\frac{\partial V}{\partial P}\right)_T^2
Returns
-------
        d2rho_dP2 : float
            Second pressure derivative of molar density, [mol/(m^3*Pa^2)]
'''
V = self.V()
return -self.d2V_dP2()/V**2 + 2*self.dV_dP()**2/V**3
def dT_drho(self):
r'''Method to calculate and return the molar density derivative of
temperature of the phase.
.. math::
\frac{\partial T}{\partial \rho} = -V^2\left(\frac{\partial
T}{\partial V}\right)_P
Returns
-------
dT_drho : float
Molar density derivative of temperature, [K*m^3/mol]
'''
V = self.V()
return -V*V*self.dT_dV()
def d2T_drho2(self):
r'''Method to calculate and return the second molar density derivative
of temperature of the phase.
.. math::
\frac{\partial^2 T}{\partial \rho^2} = -V^2\left(
-V^2 \left(\frac{\partial^2 T}{\partial V^2}\right)_P
-2 V \left(\frac{\partial T}{\partial V}\right)_P
\right)
Returns
-------
d2T_drho2 : float
Second molar density derivative of temperature, [K*m^6/mol^2]
'''
V = self.V()
return V*V*V*(V*self.d2T_dV2() + 2.0*self.dT_dV())
def drho_dT(self):
r'''Method to calculate and return the temperature derivative of
molar density of the phase.
.. math::
\frac{\partial \rho}{\partial T} = -\frac{1}{V^2}\left(\frac{\partial
V}{\partial T}\right)_P
Returns
-------
drho_dT : float
Temperature derivative of molar density, [mol/(K*m^3)]
'''
V = self.V()
return -self.dV_dT()/(V*V)
def d2rho_dT2(self):
r'''Method to calculate and return the second temperature derivative
of molar density of the phase.
.. math::
\frac{\partial^2 \rho}{\partial T^2} = -\frac{1}{V^2}
\left(\frac{\partial^2 V}{\partial T^2}\right)_P
            + \frac{2}{V^3} \left(\frac{\partial V}{\partial T}\right)_P^2
Returns
-------
        d2rho_dT2 : float
            Second temperature derivative of molar density, [mol/(m^3*K^2)]
'''
d2V_dT2 = self.d2V_dT2()
V = self.V()
dV_dT = self.dV_dT()
return -d2V_dT2/V**2 + 2*dV_dT**2/V**3
def d2P_dTdrho(self):
r'''Method to calculate and return the temperature derivative
and then molar density derivative of the pressure of the phase.
.. math::
\frac{\partial^2 P}{\partial T \partial \rho} = -V^2
\left(\frac{\partial^2 P}{\partial T \partial V}\right)
Returns
-------
d2P_dTdrho : float
Temperature derivative and then molar density derivative of the
pressure, [Pa*m^3/(K*mol)]
'''
V = self.V()
d2P_dTdV = self.d2P_dTdV()
return -(V*V)*d2P_dTdV
def d2T_dPdrho(self):
r'''Method to calculate and return the pressure derivative
and then molar density derivative of the temperature of the phase.
.. math::
\frac{\partial^2 T}{\partial P \partial \rho} = -V^2
\left(\frac{\partial^2 T}{\partial P \partial V}\right)
Returns
-------
d2T_dPdrho : float
Pressure derivative and then molar density derivative of the
temperature, [K*m^3/(Pa*mol)]
'''
V = self.V()
d2T_dPdV = self.d2T_dPdV()
return -(V*V)*d2T_dPdV
def d2rho_dPdT(self):
r'''Method to calculate and return the pressure derivative
and then temperature derivative of the molar density of the phase.
.. math::
\frac{\partial^2 \rho}{\partial P \partial T} = -\frac{1}{V^2}
\left(\frac{\partial^2 V}{\partial P \partial T}\right)
+ \frac{2}{V^3} \left(\frac{\partial V}{\partial T}\right)_P
\left(\frac{\partial V}{\partial P}\right)_T
Returns
-------
d2rho_dPdT : float
Pressure derivative and then temperature derivative of the
molar density, [mol/(m^3*K*Pa)]
'''
d2V_dPdT = self.d2V_dPdT()
dV_dT = self.dV_dT()
dV_dP = self.dV_dP()
V = self.V()
return -d2V_dPdT/V**2 + 2*dV_dT*dV_dP/V**3
def drho_dV_T(self):
r'''Method to calculate and return the volume derivative of
molar density of the phase.
.. math::
\frac{\partial \rho}{\partial V} = -\frac{1}{V^2}
Returns
-------
        drho_dV_T : float
            Volume derivative of molar density, [mol^2/m^6]
'''
V = self.V()
return -1.0/(V*V)
def drho_dT_V(self):
r'''Method to calculate and return the temperature derivative of
molar density of the phase at constant volume.
.. math::
\left(\frac{\partial \rho}{\partial T}\right)_V = 0
Returns
-------
drho_dT_V : float
Temperature derivative of molar density of the phase at constant
volume, [mol/(m^3*K)]
'''
return 0.0
    # Ideal gas heat capacity
def _setup_Cpigs(self, HeatCapacityGases):
Cpgs_data = None
Cpgs_poly_fit = all(i.method == POLY_FIT for i in HeatCapacityGases) if HeatCapacityGases is not None else False
if Cpgs_poly_fit:
T_REF_IG = self.T_REF_IG
Cpgs_data = [[i.poly_fit_Tmin for i in HeatCapacityGases],
[i.poly_fit_Tmin_slope for i in HeatCapacityGases],
[i.poly_fit_Tmin_value for i in HeatCapacityGases],
[i.poly_fit_Tmax for i in HeatCapacityGases],
[i.poly_fit_Tmax_slope for i in HeatCapacityGases],
[i.poly_fit_Tmax_value for i in HeatCapacityGases],
[i.poly_fit_log_coeff for i in HeatCapacityGases],
# [horner(i.poly_fit_int_coeffs, i.poly_fit_Tmin) for i in HeatCapacityGases],
[horner(i.poly_fit_int_coeffs, i.poly_fit_Tmin) - i.poly_fit_Tmin*(0.5*i.poly_fit_Tmin_slope*i.poly_fit_Tmin + i.poly_fit_Tmin_value - i.poly_fit_Tmin_slope*i.poly_fit_Tmin) for i in HeatCapacityGases],
# [horner(i.poly_fit_int_coeffs, i.poly_fit_Tmax) for i in HeatCapacityGases],
[horner(i.poly_fit_int_coeffs, i.poly_fit_Tmax) - horner(i.poly_fit_int_coeffs, i.poly_fit_Tmin) + i.poly_fit_Tmin*(0.5*i.poly_fit_Tmin_slope*i.poly_fit_Tmin + i.poly_fit_Tmin_value - i.poly_fit_Tmin_slope*i.poly_fit_Tmin) for i in HeatCapacityGases],
# [horner_log(i.poly_fit_T_int_T_coeffs, i.poly_fit_log_coeff, i.poly_fit_Tmin) for i in HeatCapacityGases],
[horner_log(i.poly_fit_T_int_T_coeffs, i.poly_fit_log_coeff, i.poly_fit_Tmin) -(i.poly_fit_Tmin_slope*i.poly_fit_Tmin + (i.poly_fit_Tmin_value - i.poly_fit_Tmin_slope*i.poly_fit_Tmin)*log(i.poly_fit_Tmin)) for i in HeatCapacityGases],
# [horner_log(i.poly_fit_T_int_T_coeffs, i.poly_fit_log_coeff, i.poly_fit_Tmax) for i in HeatCapacityGases],
[(horner_log(i.poly_fit_T_int_T_coeffs, i.poly_fit_log_coeff, i.poly_fit_Tmax)
- horner_log(i.poly_fit_T_int_T_coeffs, i.poly_fit_log_coeff, i.poly_fit_Tmin)
+ (i.poly_fit_Tmin_slope*i.poly_fit_Tmin + (i.poly_fit_Tmin_value - i.poly_fit_Tmin_slope*i.poly_fit_Tmin)*log(i.poly_fit_Tmin))
- (i.poly_fit_Tmax_value -i.poly_fit_Tmax*i.poly_fit_Tmax_slope)*log(i.poly_fit_Tmax)) for i in HeatCapacityGases],
[poly_fit_integral_value(T_REF_IG, i.poly_fit_int_coeffs, i.poly_fit_Tmin,
i.poly_fit_Tmax, i.poly_fit_Tmin_value,
i.poly_fit_Tmax_value, i.poly_fit_Tmin_slope,
i.poly_fit_Tmax_slope) for i in HeatCapacityGases],
[i.poly_fit_coeffs for i in HeatCapacityGases],
[i.poly_fit_int_coeffs for i in HeatCapacityGases],
[i.poly_fit_T_int_T_coeffs for i in HeatCapacityGases],
[poly_fit_integral_over_T_value(T_REF_IG, i.poly_fit_T_int_T_coeffs, i.poly_fit_log_coeff, i.poly_fit_Tmin,
i.poly_fit_Tmax, i.poly_fit_Tmin_value,
i.poly_fit_Tmax_value, i.poly_fit_Tmin_slope,
i.poly_fit_Tmax_slope) for i in HeatCapacityGases],
]
return (Cpgs_poly_fit, Cpgs_data)
def _Cp_pure_fast(self, Cps_data):
Cps = []
T, cmps = self.T, range(self.N)
Tmins, Tmaxs, coeffs = Cps_data[0], Cps_data[3], Cps_data[12]
Tmin_slopes = Cps_data[1]
Tmin_values = Cps_data[2]
Tmax_slopes = Cps_data[4]
Tmax_values = Cps_data[5]
for i in cmps:
if T < Tmins[i]:
Cp = (T - Tmins[i])*Tmin_slopes[i] + Tmin_values[i]
elif T > Tmaxs[i]:
Cp = (T - Tmaxs[i])*Tmax_slopes[i] + Tmax_values[i]
else:
Cp = 0.0
for c in coeffs[i]:
Cp = Cp*T + c
Cps.append(Cp)
return Cps
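    # Inside the poly_fit temperature range the loop above is a plain Horner
    # evaluation of the Cp polynomial; outside it, Cp is extrapolated
    # linearly from the nearest fit boundary. A standalone sketch of the
    # same scheme for one component (coefficients highest power first; all
    # names are illustrative):
    #
    #     def Cp_poly_fit(T, Tmin, Tmax, coeffs, Tmin_slope, Tmin_value,
    #                     Tmax_slope, Tmax_value):
    #         if T < Tmin:
    #             return (T - Tmin)*Tmin_slope + Tmin_value
    #         if T > Tmax:
    #             return (T - Tmax)*Tmax_slope + Tmax_value
    #         Cp = 0.0
    #         for c in coeffs:
    #             Cp = Cp*T + c
    #         return Cp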
def _dCp_dT_pure_fast(self, Cps_data):
dCps = []
T, cmps = self.T, range(self.N)
Tmins, Tmaxs, coeffs = Cps_data[0], Cps_data[3], Cps_data[12]
Tmin_slopes = Cps_data[1]
Tmax_slopes = Cps_data[4]
for i in cmps:
if T < Tmins[i]:
dCp = Tmin_slopes[i]
elif T > Tmaxs[i]:
dCp = Tmax_slopes[i]
else:
Cp, dCp = 0.0, 0.0
for c in coeffs[i]:
dCp = T*dCp + Cp
Cp = T*Cp + c
dCps.append(dCp)
return dCps
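    # The in-range branch above evaluates the polynomial and its derivative
    # in a single Horner pass: updating dCp = T*dCp + Cp before Cp = T*Cp + c
    # makes dCp accumulate d(Cp)/dT. A sketch of the bare algorithm
    # (illustrative):
    #
    #     def horner_and_der(coeffs, T):
    #         p, dp = 0.0, 0.0
    #         for c in coeffs:
    #             dp = dp*T + p
    #             p = p*T + c
    #         return p, dp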
def _Cp_integrals_pure_fast(self, Cps_data):
Cp_integrals_pure = []
T, cmps = self.T, range(self.N)
Tmins, Tmaxes, int_coeffs = Cps_data[0], Cps_data[3], Cps_data[13]
for i in cmps:
# If indeed everything is working here, need to optimize to decide what to store
# Try to save lookups to avoid cache misses
# Instead of storing horner Tmin and Tmax, store -:
# tot(Tmin) - Cps_data[7][i]
# and tot1 + tot for the high T
# Should save quite a bit of lookups! est. .12 go to .09
# Tmin = Tmins[i]
# if T < Tmin:
# x1 = Cps_data[2][i] - Cps_data[1][i]*Tmin
# H = T*(0.5*Cps_data[1][i]*T + x1)
# elif (T <= Tmaxes[i]):
# x1 = Cps_data[2][i] - Cps_data[1][i]*Tmin
# tot = Tmin*(0.5*Cps_data[1][i]*Tmin + x1)
#
# tot1 = 0.0
# for c in int_coeffs[i]:
# tot1 = tot1*T + c
# tot1 -= Cps_data[7][i]
## tot1 = horner(int_coeffs[i], T) - horner(int_coeffs[i], Tmin)
# H = tot + tot1
# else:
# x1 = Cps_data[2][i] - Cps_data[1][i]*Tmin
# tot = Tmin*(0.5*Cps_data[1][i]*Tmin + x1)
#
# tot1 = Cps_data[8][i] - Cps_data[7][i]
#
# x1 = Cps_data[5][i] - Cps_data[4][i]*Tmaxes[i]
# tot2 = T*(0.5*Cps_data[4][i]*T + x1) - Tmaxes[i]*(0.5*Cps_data[4][i]*Tmaxes[i] + x1)
# H = tot + tot1 + tot2
# ATTEMPT AT FAST HERE (NOW WORKING)
if T < Tmins[i]:
x1 = Cps_data[2][i] - Cps_data[1][i]*Tmins[i]
H = T*(0.5*Cps_data[1][i]*T + x1)
elif (T <= Tmaxes[i]):
H = 0.0
for c in int_coeffs[i]:
H = H*T + c
H -= Cps_data[7][i]
else:
Tmax_slope = Cps_data[4][i]
x1 = Cps_data[5][i] - Tmax_slope*Tmaxes[i]
H = T*(0.5*Tmax_slope*T + x1) - Tmaxes[i]*(0.5*Tmax_slope*Tmaxes[i] + x1)
H += Cps_data[8][i]
Cp_integrals_pure.append(H - Cps_data[11][i])
return Cp_integrals_pure
def _Cp_integrals_over_T_pure_fast(self, Cps_data):
Cp_integrals_over_T_pure = []
T, cmps = self.T, range(self.N)
Tmins, Tmaxes, T_int_T_coeffs = Cps_data[0], Cps_data[3], Cps_data[14]
logT = log(T)
for i in cmps:
Tmin = Tmins[i]
if T < Tmin:
x1 = Cps_data[2][i] - Cps_data[1][i]*Tmin
S = (Cps_data[1][i]*T + x1*logT)
elif (Tmin <= T <= Tmaxes[i]):
S = 0.0
for c in T_int_T_coeffs[i]:
S = S*T + c
S += Cps_data[6][i]*logT
# The below should be in a constant - taking the place of Cps_data[9]
S -= Cps_data[9][i]
# x1 = Cps_data[2][i] - Cps_data[1][i]*Tmin
# S += (Cps_data[1][i]*Tmin + x1*log(Tmin))
else:
# x1 = Cps_data[2][i] - Cps_data[1][i]*Tmin
# S = (Cps_data[1][i]*Tmin + x1*log(Tmin))
# S += (Cps_data[10][i] - Cps_data[9][i])
S = Cps_data[10][i]
# The above should be in the constant Cps_data[10], - x2*log(Tmaxes[i]) also
x2 = Cps_data[5][i] - Tmaxes[i]*Cps_data[4][i]
S += -Cps_data[4][i]*(Tmaxes[i] - T) + x2*logT #- x2*log(Tmaxes[i])
Cp_integrals_over_T_pure.append(S - Cps_data[15][i])
return Cp_integrals_over_T_pure
def Cpigs_pure(self):
r'''Method to calculate and return the ideal-gas heat capacities of
every component in the phase. This method is powered by the
`HeatCapacityGases` objects, except when all components have the same
heat capacity form and a fast implementation has been written for it
(currently only polynomials).
Returns
-------
Cp_ig : list[float]
Molar ideal gas heat capacities, [J/(mol*K)]
'''
try:
return self._Cpigs
except AttributeError:
pass
if self.Cpgs_poly_fit:
self._Cpigs = self._Cp_pure_fast(self._Cpgs_data)
return self._Cpigs
T = self.T
self._Cpigs = [i.T_dependent_property(T) for i in self.HeatCapacityGases]
return self._Cpigs
def Cpig_integrals_pure(self):
r'''Method to calculate and return the integrals of the ideal-gas heat
capacities of every component in the phase from a temperature of
:obj:`Phase.T_REF_IG` to the system temperature. This method is powered by the
`HeatCapacityGases` objects, except when all components have the same
heat capacity form and a fast implementation has been written for it
(currently only polynomials).
.. math::
\Delta H^{ig} = \int^T_{T_{ref}} C_p^{ig} dT
Returns
-------
dH_ig : list[float]
Integrals of ideal gas heat capacity from the reference
temperature to the system temperature, [J/(mol)]
'''
try:
return self._Cpig_integrals_pure
except AttributeError:
pass
if self.Cpgs_poly_fit:
self._Cpig_integrals_pure = self._Cp_integrals_pure_fast(self._Cpgs_data)
return self._Cpig_integrals_pure
T, T_REF_IG, HeatCapacityGases = self.T, self.T_REF_IG, self.HeatCapacityGases
self._Cpig_integrals_pure = [obj.T_dependent_property_integral(T_REF_IG, T)
for obj in HeatCapacityGases]
return self._Cpig_integrals_pure
def Cpig_integrals_over_T_pure(self):
r'''Method to calculate and return the integrals of the ideal-gas heat
capacities divided by temperature of every component in the phase from
a temperature of :obj:`Phase.T_REF_IG` to the system temperature.
This method is powered by the
`HeatCapacityGases` objects, except when all components have the same
heat capacity form and a fast implementation has been written for it
(currently only polynomials).
.. math::
\Delta S^{ig} = \int^T_{T_{ref}} \frac{C_p^{ig}}{T} dT
Returns
-------
        dS_ig : list[float]
            Integrals of ideal gas heat capacity over temperature from the
            reference temperature to the system temperature, [J/(mol*K)]
'''
try:
return self._Cpig_integrals_over_T_pure
except AttributeError:
pass
if self.Cpgs_poly_fit:
self._Cpig_integrals_over_T_pure = self._Cp_integrals_over_T_pure_fast(self._Cpgs_data)
return self._Cpig_integrals_over_T_pure
T, T_REF_IG, HeatCapacityGases = self.T, self.T_REF_IG, self.HeatCapacityGases
self._Cpig_integrals_over_T_pure = [obj.T_dependent_property_integral_over_T(T_REF_IG, T)
for obj in HeatCapacityGases]
return self._Cpig_integrals_over_T_pure
def dCpigs_dT_pure(self):
r'''Method to calculate and return the first temperature derivative of
ideal-gas heat capacities of every component in the phase. This method
is powered by the `HeatCapacityGases` objects, except when all
components have the same heat capacity form and a fast implementation
has been written for it (currently only polynomials).
.. math::
\frac{\partial C_p^{ig}}{\partial T}
Returns
-------
dCp_ig_dT : list[float]
First temperature derivatives of molar ideal gas heat capacities,
[J/(mol*K^2)]
'''
try:
return self._dCpigs_dT
except AttributeError:
pass
if self.Cpgs_poly_fit:
self._dCpigs_dT = self._dCp_dT_pure_fast(self._Cpgs_data)
return self._dCpigs_dT
T = self.T
self._dCpigs_dT = [i.T_dependent_property_derivative(T) for i in self.HeatCapacityGases]
return self._dCpigs_dT
def _Cpls_pure(self):
try:
return self._Cpls
except AttributeError:
pass
if self.Cpls_poly_fit:
self._Cpls = self._Cp_pure_fast(self._Cpls_data)
return self._Cpls
T = self.T
self._Cpls = [i.T_dependent_property(T) for i in self.HeatCapacityLiquids]
return self._Cpls
def _Cpl_integrals_pure(self):
try:
return self._Cpl_integrals_pure
except AttributeError:
pass
# def to_quad(T, i):
# l2 = self.to_TP_zs(T, self.P, self.zs)
# return l2._Cpls_pure()[i] + (l2.Vms_sat()[i] - T*l2.dVms_sat_dT()[i])*l2.dPsats_dT()[i]
# from scipy.integrate import quad
# vals = [float(quad(to_quad, self.T_REF_IG, self.T, args=i)[0]) for i in range(self.N)]
## print(vals, self._Cp_integrals_pure_fast(self._Cpls_data))
# return vals
if self.Cpls_poly_fit:
self._Cpl_integrals_pure = self._Cp_integrals_pure_fast(self._Cpls_data)
return self._Cpl_integrals_pure
T, T_REF_IG, HeatCapacityLiquids = self.T, self.T_REF_IG, self.HeatCapacityLiquids
self._Cpl_integrals_pure = [obj.T_dependent_property_integral(T_REF_IG, T)
for obj in HeatCapacityLiquids]
return self._Cpl_integrals_pure
def _Cpl_integrals_over_T_pure(self):
try:
return self._Cpl_integrals_over_T_pure
except AttributeError:
pass
# def to_quad(T, i):
# l2 = self.to_TP_zs(T, self.P, self.zs)
# return (l2._Cpls_pure()[i] + (l2.Vms_sat()[i] - T*l2.dVms_sat_dT()[i])*l2.dPsats_dT()[i])/T
# from scipy.integrate import quad
# vals = [float(quad(to_quad, self.T_REF_IG, self.T, args=i)[0]) for i in range(self.N)]
## print(vals, self._Cp_integrals_over_T_pure_fast(self._Cpls_data))
# return vals
if self.Cpls_poly_fit:
self._Cpl_integrals_over_T_pure = self._Cp_integrals_over_T_pure_fast(self._Cpls_data)
return self._Cpl_integrals_over_T_pure
T, T_REF_IG, HeatCapacityLiquids = self.T, self.T_REF_IG, self.HeatCapacityLiquids
self._Cpl_integrals_over_T_pure = [obj.T_dependent_property_integral_over_T(T_REF_IG, T)
for obj in HeatCapacityLiquids]
return self._Cpl_integrals_over_T_pure
def V_ideal_gas(self):
r'''Method to calculate and return the ideal-gas molar volume of the
phase.
.. math::
V^{ig} = \frac{RT}{P}
Returns
-------
V : float
Ideal gas molar volume, [m^3/mol]
'''
return self.R*self.T/self.P
def H_ideal_gas(self):
r'''Method to calculate and return the ideal-gas enthalpy of the phase.
.. math::
H^{ig} = \sum_i z_i {H_{i}^{ig}}
Returns
-------
H : float
Ideal gas enthalpy, [J/(mol)]
'''
try:
return self._H_ideal_gas
except AttributeError:
pass
H = 0.0
for zi, Cp_int in zip(self.zs, self.Cpig_integrals_pure()):
H += zi*Cp_int
self._H_ideal_gas = H
return H
def S_ideal_gas(self):
r'''Method to calculate and return the ideal-gas entropy of the phase.
.. math::
S^{ig} = \sum_i z_i S_{i}^{ig} - R\ln\left(\frac{P}{P_{ref}}\right)
- R\sum_i z_i \ln(z_i)
Returns
-------
S : float
Ideal gas molar entropy, [J/(mol*K)]
'''
try:
return self._S_ideal_gas
except AttributeError:
pass
Cpig_integrals_over_T_pure = self.Cpig_integrals_over_T_pure()
log_zs = self.log_zs()
P, zs, cmps = self.P, self.zs, range(self.N)
P_REF_IG_INV = self.P_REF_IG_INV
S = 0.0
        S -= R*sum([zs[i]*log_zs[i] for i in cmps]) # ideal mixing entropy contribution
S -= R*log(P*P_REF_IG_INV)
for i in cmps:
S += zs[i]*Cpig_integrals_over_T_pure[i]
self._S_ideal_gas = S
return S
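    # The three contributions above are the pure-component entropy integrals,
    # the pressure correction -R*ln(P/Pref), and the ideal mixing term
    # -R*sum(zi*ln(zi)). A sketch of the mixing term alone for an equimolar
    # binary (illustrative):
    #
    #     from math import log
    #     R = 8.31446261815324
    #     S_mix = -R*(0.5*log(0.5) + 0.5*log(0.5))  # ~5.763 J/(mol*K)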
def Cp_ideal_gas(self):
r'''Method to calculate and return the ideal-gas heat capacity of the
phase.
.. math::
C_p^{ig} = \sum_i z_i {C_{p,i}^{ig}}
Returns
-------
Cp : float
Ideal gas heat capacity, [J/(mol*K)]
'''
try:
return self._Cp_ideal_gas
except AttributeError:
pass
Cpigs_pure = self.Cpigs_pure()
Cp, zs = 0.0, self.zs
for i in range(self.N):
Cp += zs[i]*Cpigs_pure[i]
self._Cp_ideal_gas = Cp
return Cp
def Cv_ideal_gas(self):
r'''Method to calculate and return the ideal-gas constant volume heat
capacity of the phase.
.. math::
C_v^{ig} = \sum_i z_i {C_{p,i}^{ig}} - R
Returns
-------
Cv : float
Ideal gas constant volume heat capacity, [J/(mol*K)]
'''
try:
Cp = self._Cp_ideal_gas
except AttributeError:
Cp = self.Cp_ideal_gas()
return Cp - self.R
def Cv_dep(self):
r'''Method to calculate and return the difference between the actual
`Cv` and the ideal-gas constant volume heat
capacity :math:`C_v^{ig}` of the phase.
.. math::
C_v^{dep} = C_v - C_v^{ig}
Returns
-------
        Cv_dep : float
            Departure constant volume heat capacity, [J/(mol*K)]
'''
return self.Cv() - self.Cv_ideal_gas()
def Cp_Cv_ratio_ideal_gas(self):
r'''Method to calculate and return the ratio of the ideal-gas heat
capacity to its constant-volume heat capacity.
.. math::
\frac{C_p^{ig}}{C_v^{ig}}
Returns
-------
Cp_Cv_ratio_ideal_gas : float
Cp/Cv for the phase as an ideal gas, [-]
'''
return self.Cp_ideal_gas()/self.Cv_ideal_gas()
def G_ideal_gas(self):
r'''Method to calculate and return the ideal-gas Gibbs free energy of
the phase.
.. math::
G^{ig} = H^{ig} - T S^{ig}
Returns
-------
        G_ideal_gas : float
            Ideal gas Gibbs free energy, [J/(mol)]
'''
G_ideal_gas = self.H_ideal_gas() - self.T*self.S_ideal_gas()
return G_ideal_gas
def U_ideal_gas(self):
r'''Method to calculate and return the ideal-gas internal energy of
the phase.
.. math::
U^{ig} = H^{ig} - P V^{ig}
Returns
-------
U_ideal_gas : float
Ideal gas internal energy, [J/(mol)]
'''
U_ideal_gas = self.H_ideal_gas() - self.P*self.V_ideal_gas()
return U_ideal_gas
def A_ideal_gas(self):
r'''Method to calculate and return the ideal-gas Helmholtz energy of
the phase.
.. math::
A^{ig} = U^{ig} - T S^{ig}
Returns
-------
A_ideal_gas : float
Ideal gas Helmholtz free energy, [J/(mol)]
'''
A_ideal_gas = self.U_ideal_gas() - self.T*self.S_ideal_gas()
return A_ideal_gas
def _set_mechanical_critical_point(self):
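        # The mechanical critical point is where dP/drho = 0 and d2P/drho2 = 0
        # simultaneously; pseudocritical values from mole-fraction mixing rules
        # seed the Newton solve below.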
zs = self.zs
# Get initial guess
try:
try:
Tcs, Pcs = self.Tcs, self.Pcs
except:
try:
Tcs, Pcs = self.eos_mix.Tcs, self.eos_mix.Pcs
except:
Tcs, Pcs = self.constants.Tcs, self.constants.Pcs
Pmc, Tmc = 0.0, 0.0
for i in range(self.N):
Pmc += Pcs[i]*zs[i]
Tc_rts = [sqrt(Tc) for Tc in Tcs]
for i in range(self.N):
tot = 0.0
for j in range(self.N):
tot += zs[j]*Tc_rts[j]
Tmc += tot*Tc_rts[i]*zs[i]
except:
Tmc = 300.0
Pmc = 1e6
# Try to solve it
solution = [None]
def to_solve(TP):
T, P = float(TP[0]), float(TP[1])
new = self.to_TP_zs(T=T, P=P, zs=zs)
errs = [new.dP_drho(), new.d2P_drho2()]
solution[0] = new
return errs
jac = lambda TP: jacobian(to_solve, TP, scalar=False)
TP, iters = newton_system(to_solve, [Tmc, Pmc], jac=jac, ytol=1e-10)
# TP = fsolve(to_solve, [Tmc, Pmc]) # fsolve handles the discontinuities badly
T, P = float(TP[0]), float(TP[1])
new = solution[0]
V = new.V()
self._mechanical_critical_T = T
self._mechanical_critical_P = P
self._mechanical_critical_V = V
return T, P, V
def Tmc(self):
r'''Method to calculate and return the mechanical critical temperature
of the phase.
Returns
-------
Tmc : float
Mechanical critical temperature, [K]
'''
try:
return self._mechanical_critical_T
except:
self._set_mechanical_critical_point()
return self._mechanical_critical_T
def Pmc(self):
r'''Method to calculate and return the mechanical critical pressure
of the phase.
Returns
-------
Pmc : float
Mechanical critical pressure, [Pa]
'''
try:
return self._mechanical_critical_P
except:
self._set_mechanical_critical_point()
return self._mechanical_critical_P
def Vmc(self):
r'''Method to calculate and return the mechanical critical volume
of the phase.
Returns
-------
Vmc : float
Mechanical critical volume, [m^3/mol]
'''
try:
return self._mechanical_critical_V
except:
self._set_mechanical_critical_point()
return self._mechanical_critical_V
def Zmc(self):
r'''Method to calculate and return the mechanical critical
compressibility of the phase.
Returns
-------
Zmc : float
Mechanical critical compressibility, [-]
'''
return (self.Pmc()*self.Vmc())/(self.R*self.Tmc())
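    # Sanity note (illustrative): for a pure van der Waals fluid the mechanical
    # critical compressibility is exactly 3/8 = 0.375; cubic-EOS phases should
    # return values near their EOS-specific critical compressibility.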
def dH_dT_P(self):
r'''Method to calculate and return the temperature derivative of
enthalpy of the phase at constant pressure.
Returns
-------
dH_dT_P : float
Temperature derivative of enthalpy, [J/(mol*K)]
'''
return self.dH_dT()
def dH_dP_T(self):
r'''Method to calculate and return the pressure derivative of
        enthalpy of the phase at constant temperature.
Returns
-------
dH_dP_T : float
Pressure derivative of enthalpy, [J/(mol*Pa)]
'''
return self.dH_dP()
def dS_dP_T(self):
r'''Method to calculate and return the pressure derivative of
        entropy of the phase at constant temperature.
Returns
-------
dS_dP_T : float
Pressure derivative of entropy, [J/(mol*K*Pa)]
'''
return self.dS_dP()
def dS_dV_T(self):
r'''Method to calculate and return the volume derivative of
entropy of the phase at constant temperature.
Returns
-------
dS_dV_T : float
Volume derivative of entropy, [J/(K*m^3)]
'''
return self.dS_dP_T()*self.dP_dV()
def dS_dV_P(self):
r'''Method to calculate and return the volume derivative of
entropy of the phase at constant pressure.
Returns
-------
dS_dV_P : float
Volume derivative of entropy, [J/(K*m^3)]
'''
return self.dS_dT_P()*self.dT_dV()
def dP_dT_P(self):
r'''Method to calculate and return the temperature derivative of
        pressure of the phase at constant pressure.
Returns
-------
dP_dT_P : float
            Temperature derivative of pressure of the phase at constant
            pressure, [Pa/K]
'''
return 0.0
def dP_dV_P(self):
r'''Method to calculate and return the volume derivative of
pressure of the phase at constant pressure.
Returns
-------
dP_dV_P : float
Volume derivative of pressure of the phase at constant pressure,
[Pa*mol/m^3]
'''
return 0.0
def dT_dP_T(self):
r'''Method to calculate and return the pressure derivative of
temperature of the phase at constant temperature.
Returns
-------
dT_dP_T : float
Pressure derivative of temperature of the phase at constant
temperature, [K/Pa]
'''
return 0.0
def dT_dV_T(self):
r'''Method to calculate and return the volume derivative of
temperature of the phase at constant temperature.
Returns
-------
dT_dV_T : float
            Volume derivative of temperature of the phase at constant
temperature, [K*mol/m^3]
'''
return 0.0
def dV_dT_V(self):
r'''Method to calculate and return the temperature derivative of
volume of the phase at constant volume.
Returns
-------
dV_dT_V : float
Temperature derivative of volume of the phase at constant volume,
[m^3/(mol*K)]
'''
return 0.0
def dV_dP_V(self):
        r'''Method to calculate and return the pressure derivative of
        volume of the phase at constant volume.
Returns
-------
dV_dP_V : float
            Pressure derivative of volume of the phase at constant volume,
[m^3/(mol*Pa)]
'''
return 0.0
def dP_dP_T(self):
r'''Method to calculate and return the pressure derivative of
pressure of the phase at constant temperature.
Returns
-------
dP_dP_T : float
Pressure derivative of pressure of the phase at constant
temperature, [-]
'''
return 1.0
def dP_dP_V(self):
r'''Method to calculate and return the pressure derivative of
pressure of the phase at constant volume.
Returns
-------
dP_dP_V : float
Pressure derivative of pressure of the phase at constant
volume, [-]
'''
return 1.0
def dT_dT_P(self):
r'''Method to calculate and return the temperature derivative of
temperature of the phase at constant pressure.
Returns
-------
dT_dT_P : float
Temperature derivative of temperature of the phase at constant
pressure, [-]
'''
return 1.0
def dT_dT_V(self):
r'''Method to calculate and return the temperature derivative of
temperature of the phase at constant volume.
Returns
-------
dT_dT_V : float
Temperature derivative of temperature of the phase at constant
volume, [-]
'''
return 1.0
def dV_dV_T(self):
r'''Method to calculate and return the volume derivative of
volume of the phase at constant temperature.
Returns
-------
dV_dV_T : float
Volume derivative of volume of the phase at constant
temperature, [-]
'''
return 1.0
def dV_dV_P(self):
r'''Method to calculate and return the volume derivative of
volume of the phase at constant pressure.
Returns
-------
dV_dV_P : float
Volume derivative of volume of the phase at constant
pressure, [-]
'''
return 1.0
d2T_dV2_P = d2T_dV2
d2V_dT2_P = d2V_dT2
d2V_dP2_T = d2V_dP2
d2T_dP2_V = d2T_dP2
dV_dP_T = dV_dP
dV_dT_P = dV_dT
dT_dP_V = dT_dP
dT_dV_P = dT_dV
# More derivatives - at const H, S, G, U, A
_derivs_jacobian_x = 'V'
_derivs_jacobian_y = 'T'
def _derivs_jacobian(self, a, b, c, x=_derivs_jacobian_x,
y=_derivs_jacobian_y):
r'''Calculates and returns a first-order derivative of one property
with respect to another property at constant another property.
This is particularly useful to obtain derivatives with respect to
another property which is not an intensive variable in a model,
allowing for example derivatives at constant enthalpy or Gibbs energy
to be obtained. This formula is obtained from the first derivative
principles of reciprocity, the chain rule, and the triple product rule
as shown in [1]_.
        .. math::
\left(\frac{\partial a}{\partial b}\right)_{c}=
\frac{\left(\frac{\partial a}{\partial x}\right)_{y}\left(
\frac{\partial c}{\partial y}\right)_{x}-\left(\frac{\partial a}{
\partial y}\right)_{x}\left(\frac{\partial c}{\partial x}
\right)_{y}}{\left(\frac{\partial b}{\partial x}\right)_{y}\left(
\frac{\partial c}{\partial y}\right)_{x}-\left(\frac{\partial b}
{\partial y}\right)_{x}\left(\frac{\partial c}{\partial x}
\right)_{y}}
References
----------
.. [1] Thorade, Matthis, and <NAME>. "Partial Derivatives of
Thermodynamic State Properties for Dynamic Simulation."
Environmental Earth Sciences 70, no. 8 (April 10, 2013): 3497-3503.
https://doi.org/10.1007/s12665-013-2394-z.
'''
n0 = getattr(self, 'd%s_d%s_%s'%(a, x, y))()
n1 = getattr(self, 'd%s_d%s_%s'%(c, y, x))()
n2 = getattr(self, 'd%s_d%s_%s'%(a, y, x))()
n3 = getattr(self, 'd%s_d%s_%s'%(c, x, y))()
d0 = getattr(self, 'd%s_d%s_%s'%(b, x, y))()
d1 = getattr(self, 'd%s_d%s_%s'%(c, y, x))()
d2 = getattr(self, 'd%s_d%s_%s'%(b, y, x))()
d3 = getattr(self, 'd%s_d%s_%s'%(c, x, y))()
return (n0*n1 - n2*n3)/(d0*d1 - d2*d3)
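    # Illustrative use (hypothetical `phase` object): the pressure derivative
    # of temperature at constant enthalpy, i.e. the Joule-Thomson coefficient,
    # can be written as
    #     mu_JT = phase._derivs_jacobian(a='T', b='P', c='H')
    # using the default (x='V', y='T') basis variables.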
### Transport properties - pass them on!
# Properties that use `constants` attributes
def MW(self):
r'''Method to calculate and return molecular weight of the phase.
.. math::
\text{MW} = \sum_i z_i \text{MW}_i
Returns
-------
MW : float
Molecular weight, [g/mol]
'''
try:
return self._MW
except AttributeError:
pass
zs, MWs = self.zs, self.constants.MWs
MW = 0.0
for i in range(self.N):
MW += zs[i]*MWs[i]
self._MW = MW
return MW
def MW_inv(self):
r'''Method to calculate and return inverse of molecular weight of the
phase.
.. math::
\frac{1}{\text{MW}} = \frac{1}{\sum_i z_i \text{MW}_i}
Returns
-------
MW_inv : float
Inverse of molecular weight, [mol/g]
'''
try:
return self._MW_inv
except AttributeError:
pass
self._MW_inv = MW_inv = 1.0/self.MW()
return MW_inv
def speed_of_sound_mass(self):
r'''Method to calculate and return the speed of sound
of the phase.
.. math::
w = \left[-V^2 \frac{1000}{MW}\left(\frac{\partial P}{\partial V}
\right)_T \frac{C_p}{C_v}\right]^{1/2}
Returns
-------
w : float
Speed of sound for a real gas, [m/s]
'''
# 1000**0.5 = 31.622776601683793
return 31.622776601683793/sqrt(self.MW())*self.speed_of_sound()
def rho_mass(self):
r'''Method to calculate and return mass density of the phase.
.. math::
\rho = \frac{MW}{1000\cdot VM}
Returns
-------
rho_mass : float
Mass density, [kg/m^3]
'''
try:
return self._rho_mass
except AttributeError:
pass
self._rho_mass = rho_mass = self.MW()/(1000.0*self.V())
return rho_mass
def drho_mass_dT(self):
r'''Method to calculate the mass density derivative with respect to
temperature, at constant pressure.
.. math::
\left(\frac{\partial \rho}{\partial T}\right)_{P} =
\frac{-\text{MW} \frac{\partial V_m}{\partial T}}{1000 V_m^2}
Returns
-------
drho_mass_dT : float
Temperature derivative of mass density at constant pressure,
[kg/m^3/K]
Notes
-----
Requires `dV_dT`, `MW`, and `V`.
This expression is readily obtainable with SymPy:
>>> from sympy import * # doctest: +SKIP
>>> T, P, MW = symbols('T, P, MW') # doctest: +SKIP
>>> Vm = symbols('Vm', cls=Function) # doctest: +SKIP
>>> rho_mass = (Vm(T))**-1*MW/1000 # doctest: +SKIP
>>> diff(rho_mass, T) # doctest: +SKIP
-MW*Derivative(Vm(T), T)/(1000*Vm(T)**2)
'''
try:
return self._drho_mass_dT
except AttributeError:
pass
MW = self.MW()
V = self.V()
dV_dT = self.dV_dT()
self._drho_mass_dT = drho_mass_dT = -MW*dV_dT/(1000.0*V*V)
return drho_mass_dT
def drho_mass_dP(self):
r'''Method to calculate the mass density derivative with respect to
pressure, at constant temperature.
.. math::
\left(\frac{\partial \rho}{\partial P}\right)_{T} =
\frac{-\text{MW} \frac{\partial V_m}{\partial P}}{1000 V_m^2}
Returns
-------
drho_mass_dP : float
Pressure derivative of mass density at constant temperature,
[kg/m^3/Pa]
Notes
-----
Requires `dV_dP`, `MW`, and `V`.
        This expression is readily obtainable with SymPy:
>>> from sympy import * # doctest: +SKIP
>>> P, T, MW = symbols('P, T, MW') # doctest: +SKIP
>>> Vm = symbols('Vm', cls=Function) # doctest: +SKIP
>>> rho_mass = (Vm(P))**-1*MW/1000 # doctest: +SKIP
>>> diff(rho_mass, P) # doctest: +SKIP
-MW*Derivative(Vm(P), P)/(1000*Vm(P)**2)
'''
try:
return self._drho_mass_dP
except AttributeError:
pass
MW = self.MW()
V = self.V()
dV_dP = self.dV_dP()
self._drho_mass_dP = drho_mass_dP = -MW*dV_dP/(1000.0*V*V)
return drho_mass_dP
def H_mass(self):
r'''Method to calculate and return mass enthalpy of the phase.
.. math::
H_{mass} = \frac{1000 H_{molar}}{MW}
Returns
-------
H_mass : float
Mass enthalpy, [J/kg]
'''
try:
return self._H_mass
except AttributeError:
pass
self._H_mass = H_mass = self.H()*1e3*self.MW_inv()
return H_mass
def S_mass(self):
r'''Method to calculate and return mass entropy of the phase.
.. math::
S_{mass} = \frac{1000 S_{molar}}{MW}
Returns
-------
S_mass : float
            Mass entropy, [J/(kg*K)]
'''
try:
return self._S_mass
except AttributeError:
pass
self._S_mass = S_mass = self.S()*1e3*self.MW_inv()
return S_mass
def U_mass(self):
r'''Method to calculate and return mass internal energy of the phase.
.. math::
U_{mass} = \frac{1000 U_{molar}}{MW}
Returns
-------
U_mass : float
Mass internal energy, [J/(kg)]
'''
try:
return self._U_mass
except AttributeError:
pass
self._U_mass = U_mass = self.U()*1e3*self.MW_inv()
return U_mass
def A_mass(self):
r'''Method to calculate and return mass Helmholtz energy of the phase.
.. math::
A_{mass} = \frac{1000 A_{molar}}{MW}
Returns
-------
A_mass : float
Mass Helmholtz energy, [J/(kg)]
'''
try:
return self._A_mass
except AttributeError:
pass
self._A_mass = A_mass = self.A()*1e3*self.MW_inv()
return A_mass
def G_mass(self):
r'''Method to calculate and return mass Gibbs energy of the phase.
.. math::
G_{mass} = \frac{1000 G_{molar}}{MW}
Returns
-------
G_mass : float
Mass Gibbs energy, [J/(kg)]
'''
try:
return self._G_mass
except AttributeError:
pass
self._G_mass = G_mass = self.G()*1e3*self.MW_inv()
return G_mass
def Cp_mass(self):
r'''Method to calculate and return mass constant pressure heat capacity
of the phase.
.. math::
Cp_{mass} = \frac{1000 Cp_{molar}}{MW}
Returns
-------
Cp_mass : float
Mass heat capacity, [J/(kg*K)]
'''
try:
return self._Cp_mass
except AttributeError:
pass
self._Cp_mass = Cp_mass = self.Cp()*1e3*self.MW_inv()
return Cp_mass
def Cv_mass(self):
r'''Method to calculate and return mass constant volume heat capacity
of the phase.
.. math::
Cv_{mass} = \frac{1000 Cv_{molar}}{MW}
Returns
-------
Cv_mass : float
Mass constant volume heat capacity, [J/(kg*K)]
'''
try:
return self._Cv_mass
except AttributeError:
pass
self._Cv_mass = Cv_mass = self.Cv()*1e3*self.MW_inv()
return Cv_mass
def P_transitions(self):
r'''Dummy method. The idea behind this method is to calculate any
pressures (at constant temperature) which cause the phase properties to
become discontinuous.
Returns
-------
P_transitions : list[float]
Transition pressures, [Pa]
'''
return []
def T_max_at_V(self, V):
r'''Method to calculate the maximum temperature the phase can create at a
constant volume, if one exists; returns None otherwise.
Parameters
----------
V : float
Constant molar volume, [m^3/mol]
Returns
-------
T : float
Maximum possible temperature, [K]
Notes
-----
'''
return None
def P_max_at_V(self, V):
r'''Dummy method. The idea behind this method, which is implemented by some
subclasses, is to calculate the maximum pressure the phase can create at a
constant volume, if one exists; returns None otherwise. This method,
as a dummy method, always returns None.
Parameters
----------
V : float
Constant molar volume, [m^3/mol]
Returns
-------
P : float
Maximum possible isochoric pressure, [Pa]
'''
return None
def dspeed_of_sound_dT_P(self):
r'''Method to calculate the temperature derivative of speed of sound
at constant pressure in molar units.
.. math::
\left(\frac{\partial c}{\partial T}\right)_P =
- \frac{\sqrt{- \frac{\operatorname{Cp}{\left(T \right)} V^{2}
{\left(T \right)} \operatorname{dPdV_{T}}{\left(T \right)}}
{\operatorname{Cv}{\left(T \right)}}} \left(- \frac{\operatorname{Cp}
{\left(T \right)} V^{2}{\left(T \right)} \frac{d}{d T}
\operatorname{dPdV_{T}}{\left(T \right)}}{2 \operatorname{Cv}{\left(T
\right)}} - \frac{\operatorname{Cp}{\left(T \right)} V{\left(T
\right)} \operatorname{dPdV_{T}}{\left(T \right)} \frac{d}{d T}
V{\left(T \right)}}{\operatorname{Cv}{\left(T \right)}}
+ \frac{\operatorname{Cp}{\left(T \right)} V^{2}{\left(T \right)}
\operatorname{dPdV_{T}}{\left(T \right)} \frac{d}{d T}
\operatorname{Cv}{\left(T \right)}}{2 \operatorname{Cv}^{2}
{\left(T \right)}} - \frac{V^{2}{\left(T \right)} \operatorname{
dPdV_{T}}{\left(T \right)} \frac{d}{d T} \operatorname{Cp}{\left(T
\right)}}{2 \operatorname{Cv}{\left(T \right)}}\right)
\operatorname{Cv}{\left(T \right)}}{\operatorname{Cp}{\left(T
\right)} V^{2}{\left(T \right)} \operatorname{dPdV_{T}}{\left(T
\right)}}
Returns
-------
dspeed_of_sound_dT_P : float
Temperature derivative of speed of sound at constant pressure,
[m*kg^0.5/s/mol^0.5/K]
Notes
-----
Requires the temperature derivative of Cp and Cv both at constant
        pressure, as well as the volume and temperature derivatives of pressure,
calculated at constant temperature and then pressure respectively.
These can be tricky to obtain.
'''
'''Calculation with SymPy:
from sympy import *
T = symbols('T')
V, dPdV_T, Cp, Cv = symbols('V, dPdV_T, Cp, Cv', cls=Function)
c = sqrt(-V(T)**2*dPdV_T(T)*Cp(T)/Cv(T))
        print(latex(diff(c, T)))
        '''
x0 = self.Cp()
x1 = self.V()
x2 = self.dP_dV()
x3 = self.Cv()
x4 = x0*x2
x5 = x4/x3
x6 = 0.5*x1
x50 = self.d2P_dVdT_TP()
x51 = self.d2H_dT2()
x52 = self.dV_dT()
x53 = self.dCv_dT_P()
        return (-x1*x1*x5)**0.5*(x0*x6*x50 + x2*x6*x51 + x4*x52 - x5*x6*x53)/(x0*x1*x2)
def dspeed_of_sound_dP_T(self):
r'''Method to calculate the pressure derivative of speed of sound
at constant temperature in molar units.
.. math::
\left(\frac{\partial c}{\partial P}\right)_T =
- \frac{\sqrt{- \frac{\operatorname{Cp}{\left(P \right)} V^{2}
{\left(P \right)} \operatorname{dPdV_{T}}{\left(P \right)}}
{\operatorname{Cv}{\left(P \right)}}} \left(- \frac{
\operatorname{Cp}{\left(P \right)} V^{2}{\left(P \right)} \frac{d}
{d P} \operatorname{dPdV_{T}}{\left(P \right)}}{2 \operatorname{Cv}
{\left(P \right)}} - \frac{\operatorname{Cp}{\left(P \right)}
V{\left(P \right)} \operatorname{dPdV_{T}}{\left(P \right)}
\frac{d}{d P} V{\left(P \right)}}{\operatorname{Cv}{\left(P \right)
}} + \frac{\operatorname{Cp}{\left(P \right)} V^{2}{\left(P \right)
} \operatorname{dPdV_{T}}{\left(P \right)} \frac{d}{d P}
\operatorname{Cv}{\left(P \right)}}{2 \operatorname{Cv}^{2}{\left(P
\right)}} - \frac{V^{2}{\left(P \right)} \operatorname{dPdV_{T}}
{\left(P \right)} \frac{d}{d P} \operatorname{Cp}{\left(P \right)}}
{2 \operatorname{Cv}{\left(P \right)}}\right) \operatorname{Cv}
{\left(P \right)}}{\operatorname{Cp}{\left(P \right)} V^{2}{\left(P
\right)} \operatorname{dPdV_{T}}{\left(P \right)}}
Returns
-------
dspeed_of_sound_dP_T : float
Pressure derivative of speed of sound at constant temperature,
[m*kg^0.5/s/mol^0.5/Pa]
Notes
-----
'''
'''
from sympy import *
P = symbols('P')
V, dPdV_T, Cp, Cv = symbols('V, dPdV_T, Cp, Cv', cls=Function)
c = sqrt(-V(P)**2*dPdV_T(P)*Cp(P)/Cv(P))
print(latex(diff(c, P)))
'''
x0 = self.Cp()
x1 = self.V()
x2 = self.dP_dV()
x3 = self.Cv()
x4 = x0*x2
x5 = x4/x3
x6 = 0.5*x1
x50 = self.d2P_dVdP()
x51 = self.d2H_dTdP()
x52 = self.dV_dP()
x53 = self.dCv_dP_T()
        return (-x1*x1*x5)**0.5*(x0*x6*x50 + x2*x6*x51 + x4*x52 - x5*x6*x53)/(x0*x1*x2)
# Transport properties
def mu(self):
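        r'''Method to calculate and return the viscosity of the phase,
        dispatching to the gas or liquid mixture viscosity correlation
        according to the phase type, [Pa*s]
        '''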
if isinstance(self, phases.gas_phases):
return self.correlations.ViscosityGasMixture.mixture_property(self.T, self.P, self.zs, self.ws())
elif isinstance(self, phases.liquid_phases):
return self.correlations.ViscosityLiquidMixture.mixture_property(self.T, self.P, self.zs, self.ws())
else:
            raise NotImplementedError("Phase type not recognized for a viscosity correlation")
def ws(self):
r'''Method to calculate and return the mass fractions of the phase, [-]
Returns
-------
ws : list[float]
Mass fractions, [-]
Notes
-----
'''
try:
return self._ws
except AttributeError:
pass
MWs = self.constants.MWs
zs, cmps = self.zs, range(self.N)
ws = [zs[i]*MWs[i] for i in cmps]
Mavg = 1.0/sum(ws)
for i in cmps:
ws[i] *= Mavg
self._ws = ws
return ws
def sigma(self):
r'''Calculate and return the surface tension of the phase.
For details of the implementation, see
:obj:`SurfaceTensionMixture <thermo.interface.SurfaceTensionMixture>`.
This property is strictly the ideal-gas to liquid surface tension,
not a true inter-phase property.
Returns
-------
sigma : float
Surface tension, [N/m]
'''
try:
return self._sigma
except AttributeError:
pass
try:
phase = self.assigned_phase
except:
if self.is_liquid:
phase = 'l'
else:
phase = 'g'
if phase == 'g':
return None
elif phase == 'l':
sigma = self.correlations.SurfaceTensionMixture.mixture_property(self.T, self.P, self.zs, self.ws())
self._sigma = sigma
return sigma
@property
def beta(self):
r'''Method to return the phase fraction of this phase.
This method is only
available when the phase is linked to an EquilibriumState.
Returns
-------
beta : float
Phase fraction on a molar basis, [-]
Notes
-----
'''
try:
result = self.result
except:
return None
for i, p in enumerate(result.phases):
if p is self:
return result.betas[i]
@property
def beta_mass(self):
r'''Method to return the mass phase fraction of this phase.
This method is only
available when the phase is linked to an EquilibriumState.
Returns
-------
beta_mass : float
Phase fraction on a mass basis, [-]
Notes
-----
'''
try:
result = self.result
except:
return None
for i, p in enumerate(result.phases):
if p is self:
return result.betas_mass[i]
@property
def beta_volume(self):
r'''Method to return the volumetric phase fraction of this phase.
This method is only
available when the phase is linked to an EquilibriumState.
Returns
-------
beta_volume : float
Phase fraction on a volumetric basis, [-]
Notes
-----
'''
try:
result = self.result
except:
return None
for i, p in enumerate(result.phases):
if p is self:
return result.betas_volume[i]
@property
def VF(self):
r'''Method to return the vapor fraction of the phase.
If no vapor/gas is present, 0 is always returned. This method is only
available when the phase is linked to an EquilibriumState.
Returns
-------
VF : float
Vapor fraction, [-]
Notes
-----
'''
return self.result.gas_beta
derivatives_jacobian = []
prop_iter = (('T', 'P', 'V', 'rho'), ('T', 'P', 'V', r'\rho'), ('K', 'Pa', 'm^3/mol', 'mol/m^3'), ('temperature', 'pressure', 'volume', 'density'))
for a, a_str, a_units, a_name in zip(*prop_iter):
for b, b_str, b_units, b_name in zip(*prop_iter):
for c, c_name in zip(('H', 'S', 'G', 'U', 'A'), ('enthalpy', 'entropy', 'Gibbs energy', 'internal energy', 'Helmholtz energy')):
def _der(self, property=a, differentiate_by=b, at_constant=c):
return self._derivs_jacobian(a=property, b=differentiate_by, c=at_constant)
t = 'd%s_d%s_%s' %(a, b, c)
doc = r'''Method to calculate and return the %s derivative of %s of the phase at constant %s.
.. math::
\left(\frac{\partial %s}{\partial %s}\right)_{%s}
Returns
-------
%s : float
The %s derivative of %s of the phase at constant %s, [%s/%s]
''' %(b_name, a_name, c_name, a_str, b_str, c, t, b_name, a_name, c_name, a_units, b_units)
setattr(Phase, t, _der)
try:
_der.__doc__ = doc
except:
pass
derivatives_jacobian.append(t)
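# The loops above attach, at import time, methods named d<a>_d<b>_<c> to
# Phase, e.g. dT_dP_H() or dV_dT_S(); a hypothetical call:
#     value = phase.dT_dP_H()  # dT/dP at constant enthalpy, [K/Pa]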
derivatives_thermodynamic = ['dA_dP', 'dA_dP_T', 'dA_dP_V', 'dA_dT', 'dA_dT_P', 'dA_dT_V', 'dA_dV_P', 'dA_dV_T',
'dCv_dP_T', 'dCv_dT_P', 'dG_dP', 'dG_dP_T', 'dG_dP_V', 'dG_dT', 'dG_dT_P', 'dG_dT_V',
'dG_dV_P', 'dG_dV_T', 'dH_dP', 'dH_dP_T', 'dH_dP_V', 'dH_dT', 'dH_dT_P', 'dH_dT_V',
'dH_dV_P', 'dH_dV_T', 'dS_dP', 'dS_dP_T', 'dS_dP_V', 'dS_dT', 'dS_dT_P', 'dS_dT_V',
'dS_dV_P', 'dS_dV_T', 'dU_dP', 'dU_dP_T', 'dU_dP_V', 'dU_dT', 'dU_dT_P', 'dU_dT_V',
'dU_dV_P', 'dU_dV_T']
derivatives_thermodynamic_mass = []
prop_names = {'A' : 'Helmholtz energy',
'G': 'Gibbs free energy',
'U': 'internal energy',
'H': 'enthalpy',
'S': 'entropy',
'T': 'temperature',
'P': 'pressure',
'V': 'volume', 'Cv': 'Constant-volume heat capacity'}
prop_units = {'Cv': 'J/(mol*K)', 'A': 'J/mol', 'G': 'J/mol', 'H': 'J/mol', 'S': 'J/(mol*K)', 'U': 'J/mol', 'T': 'K', 'P': 'Pa', 'V': 'm^3/mol'}
for attr in derivatives_thermodynamic:
def _der(self, prop=attr):
return getattr(self, prop)()*1e3*self.MW_inv()
try:
base, end = attr.split('_', maxsplit=1)
except:
splits = attr.split('_')
base = splits[0]
end = '_'.join(splits[1:])
vals = attr.replace('d', '').split('_')
try:
prop, diff_by, at_constant = vals
except:
prop, diff_by = vals
at_constant = 'T' if diff_by == 'P' else 'P'
s = '%s_mass_%s' %(base, end)
doc = r'''Method to calculate and return the %s derivative of mass %s of the phase at constant %s.
.. math::
\left(\frac{\partial %s_{\text{mass}}}{\partial %s}\right)_{%s}
Returns
-------
%s : float
The %s derivative of mass %s of the phase at constant %s, [%s/%s]
''' %(prop_names[diff_by], prop_names[prop], prop_names[at_constant], prop, diff_by, at_constant, s, prop_names[diff_by], prop_names[prop], prop_names[at_constant], prop_units[prop], prop_units[diff_by])
try:
        _der.__doc__ = doc
except:
pass
setattr(Phase, s, _der)
derivatives_thermodynamic_mass.append(s)
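# Similarly, the loop above generates mass-basis variants such as
# dH_mass_dT_P(), each simply the molar derivative scaled by 1000/MW; e.g.
#     value = phase.dH_mass_dT_P()  # [J/(kg*K)], hypothetical call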
del prop_names, prop_units
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: <NAME> <<EMAIL>>
# Copyright (C) 2017 Rare Technologies
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
import unittest
import sys
from bounter import HashTable
uni_type = str if sys.version_info >= (3, 0) else unicode
class HashTableIterTypeTest(unittest.TestCase):
"""
Functional tests for HashTable use_unicode parameter
"""
def setUp(self):
self.ht = HashTable(buckets=64)
def test_itertype_default_unicode(self):
self.ht.update([b'bytes', u'Unicode', 'String'])
for key in self.ht:
self.assertEqual(type(key), uni_type)
for key, _ in self.ht.items():
self.assertEqual(type(key), uni_type)
def test_itertype_use_unicode_true(self):
self.ht = HashTable(buckets=64, use_unicode=True)
self.ht.update([b'bytes', u'Unicode', 'String'])
for key in self.ht:
self.assertEqual(type(key), uni_type)
for key, _ in self.ht.items():
self.assertEqual(type(key), uni_type)
def test_itertype_use_unicode_false(self):
self.ht = HashTable(buckets=64, use_unicode=False)
self.ht.update([b'bytes', u'Unicode', 'String'])
for key in self.ht:
self.assertEqual(type(key), bytes)
for key, _ in self.ht.items():
self.assertEqual(type(key), bytes)
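# As the tests above exercise, use_unicode only changes the type of keys
# yielded by iteration (str when True, bytes when False); matching and
# counting are unaffected (presumably keys are normalized to UTF-8 internally).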
if __name__ == '__main__':
unittest.main()
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.jimfs;
import java.nio.channels.AsynchronousFileChannel;
import java.nio.channels.FileChannel;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.nio.file.SecureDirectoryStream;
import java.nio.file.attribute.FileAttribute;
import java.util.Set;
import java.util.concurrent.ExecutorService;
/**
* Optional file system features that may be supported or unsupported by a Jimfs file system
* instance.
*
* @author <NAME>
*/
public enum Feature {
/**
* Feature controlling support for hard links to regular files.
*
* <p>Affected method:
*
* <ul>
* <li>{@link Files#createLink(Path, Path)}
* </ul>
*
* <p>If this feature is not enabled, this method will throw {@link
* UnsupportedOperationException}.
*/
LINKS,
/**
* Feature controlling support for symbolic links.
*
* <p>Affected methods:
*
* <ul>
* <li>{@link Files#createSymbolicLink(Path, Path, FileAttribute...)}
* <li>{@link Files#readSymbolicLink(Path)}
* </ul>
*
* <p>If this feature is not enabled, these methods will throw {@link
* UnsupportedOperationException}.
*/
SYMBOLIC_LINKS,
/**
* Feature controlling support for {@link SecureDirectoryStream}.
*
* <p>Affected methods:
*
* <ul>
* <li>{@link Files#newDirectoryStream(Path)}
* <li>{@link Files#newDirectoryStream(Path, DirectoryStream.Filter)}
* <li>{@link Files#newDirectoryStream(Path, String)}
* </ul>
*
* <p>If this feature is enabled, the {@link DirectoryStream} instances returned by these methods
* will also implement {@link SecureDirectoryStream}.
*/
SECURE_DIRECTORY_STREAM,
/**
* Feature controlling support for {@link FileChannel}.
*
* <p>Affected methods:
*
* <ul>
* <li>{@link Files#newByteChannel(Path, OpenOption...)}
* <li>{@link Files#newByteChannel(Path, Set, FileAttribute...)}
* <li>{@link FileChannel#open(Path, OpenOption...)}
* <li>{@link FileChannel#open(Path, Set, FileAttribute...)}
* <li>{@link AsynchronousFileChannel#open(Path, OpenOption...)}
* <li>{@link AsynchronousFileChannel#open(Path, Set, ExecutorService, FileAttribute...)}
* </ul>
*
* <p>If this feature is not enabled, the {@link SeekableByteChannel} instances returned by the
* {@code Files} methods will not be {@code FileChannel} instances and the {@code
* FileChannel.open} and {@code AsynchronousFileChannel.open} methods will throw {@link
* UnsupportedOperationException}.
*/
// TODO(cgdecker): Should support for AsynchronousFileChannel be a separate feature?
FILE_CHANNEL
}
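// A minimal usage sketch (illustrative; assumes the standard Jimfs
// Configuration.Builder API for selecting the enabled features):
//
//   Configuration config = Configuration.unix().toBuilder()
//       .setSupportedFeatures(Feature.LINKS, Feature.SYMBOLIC_LINKS)
//       .build();
//   FileSystem fs = Jimfs.newFileSystem(config);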
// Copyright 2018 The Beam Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "aunregister_transaction.h"
#include "../../core/base_tx_builder.h"
#include "core/block_crypt.h"
#include "utility/logger.h"
#include "wallet/core/strings_resources.h"
#include "wallet/core/wallet.h"
namespace beam::wallet
{
BaseTransaction::Ptr AssetUnregisterTransaction::Creator::Create(const TxContext& context)
{
return BaseTransaction::Ptr(new AssetUnregisterTransaction(context));
}
TxParameters AssetUnregisterTransaction::Creator::CheckAndCompleteParameters(const TxParameters& params)
{
if(params.GetParameter<WalletID>(TxParameterID::PeerID))
{
throw InvalidTransactionParametersException("Asset registration: unexpected PeerID");
}
if(params.GetParameter<WalletID>(TxParameterID::MyID))
{
throw InvalidTransactionParametersException("Asset registration: unexpected MyID");
}
const auto isSenderO = params.GetParameter<bool>(TxParameterID::IsSender);
if (!isSenderO || !isSenderO.get())
{
throw InvalidTransactionParametersException("Asset registration: non-sender transaction");
}
const auto isInitiatorO = params.GetParameter<bool>(TxParameterID::IsInitiator);
if (!isInitiatorO || !isInitiatorO.get())
{
throw InvalidTransactionParametersException("Asset registration: non-initiator transaction");
}
TxParameters result{params};
result.SetParameter(TxParameterID::MyID, WalletID(Zero)); // Mandatory parameter
return result;
}
struct AssetUnregisterTransaction::MyBuilder
:public AssetTransaction::Builder
{
using Builder::Builder;
void Sign()
{
if (m_pKrn)
return;
std::unique_ptr<TxKernelAssetDestroy> pKrn = std::make_unique<TxKernelAssetDestroy>();
pKrn->m_AssetID = m_Tx.GetMandatoryParameter<Asset::ID>(TxParameterID::AssetID);
AddKernel(std::move(pKrn));
FinalyzeTx();
}
};
AssetUnregisterTransaction::AssetUnregisterTransaction(const TxContext& context)
: AssetTransaction(TxType::AssetUnreg, context)
{
}
void AssetUnregisterTransaction::UpdateImpl()
{
if (!AssetTransaction::BaseUpdate())
{
return;
}
if (!_builder)
_builder = std::make_shared<MyBuilder>(*this, kDefaultSubTxID);
auto& builder = *_builder;
if (GetState<State>() == State::Initial)
{
LOG_INFO()
<< GetTxID()
<< " Unregistering asset with the owner id " << builder.m_pidAsset
<< ". Refund amount is " << PrintableAmount(Rules::get().CA.DepositForList, false);
UpdateTxDescription(TxStatus::InProgress);
}
if (builder.m_Coins.IsEmpty())
{
//
// ALWAYS refresh asset state before destroying
//
Height ucHeight = 0;
if(GetParameter(TxParameterID::AssetUnconfirmedHeight, ucHeight) && ucHeight != 0)
{
OnFailed(TxFailureReason::AssetConfirmFailed);
return;
}
Height acHeight = 0;
if(!GetParameter(TxParameterID::AssetConfirmedHeight, acHeight) || acHeight == 0)
{
SetState(State::AssetConfirmation);
ConfirmAsset();
return;
}
auto pInfo = GetWalletDB()->findAsset(builder.m_pidAsset);
if (!pInfo)
{
OnFailed(TxFailureReason::NoAssetInfo);
return;
}
WalletAsset& wa = *pInfo;
SetParameter(TxParameterID::AssetID, wa.m_ID);
if (wa.m_Value != Zero)
{
                LOG_INFO() << "AID " << wa.m_ID << " value " << AmountBig::get_Lo(wa.m_Value);
OnFailed(TxFailureReason::AssetInUse);
return;
}
if (wa.CanRollback(builder.m_Height.m_Min))
{
OnFailed(TxFailureReason::AssetLocked);
return;
}
BaseTxBuilder::Balance bb(builder);
bb.m_Map[0].m_Value += Rules::get().CA.DepositForList - builder.m_Fee;
bb.CompleteBalance();
builder.SaveCoins();
}
builder.GenerateInOuts();
if (builder.IsGeneratingInOuts())
return;
if (!builder.m_pKrn)
builder.Sign();
auto registered = proto::TxStatus::Unspecified;
if (!GetParameter(TxParameterID::TransactionRegistered, registered))
{
SetState(State::Registration);
GetGateway().register_tx(GetTxID(), builder.m_pTransaction);
return;
}
if (proto::TxStatus::Ok != registered)
{
OnFailed(TxFailureReason::FailedToRegister);
return;
}
Height kpHeight = 0;
GetParameter(TxParameterID::KernelProofHeight, kpHeight);
if (!kpHeight)
{
SetState(State::KernelConfirmation);
ConfirmKernel(builder.m_pKrn->m_Internal.m_ID);
return;
}
SetCompletedTxCoinStatuses(kpHeight);
CompleteTx();
}
bool AssetUnregisterTransaction::IsInSafety() const
{
const auto state = GetState<State>();
return state >= State::KernelConfirmation;
}
}
// plugin/localstorage/src/main/java/org/zstack/storage/primary/local/LocalStorageBase.java
package org.zstack.storage.primary.local;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.compute.host.VolumeMigrationTargetHostFilter;
import org.zstack.core.asyncbatch.While;
import org.zstack.core.cloudbus.CloudBusCallBack;
import org.zstack.core.cloudbus.EventFacade;
import org.zstack.core.componentloader.PluginRegistry;
import org.zstack.core.db.*;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.core.thread.AsyncThread;
import org.zstack.core.thread.ChainTask;
import org.zstack.core.thread.MergeQueue;
import org.zstack.core.thread.SyncTaskChain;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.core.workflow.ShareFlow;
import org.zstack.core.workflow.SimpleFlowChain;
import org.zstack.header.apimediator.ApiMessageInterceptionException;
import org.zstack.header.cluster.ClusterInventory;
import org.zstack.header.cluster.ClusterVO;
import org.zstack.header.cluster.ClusterVO_;
import org.zstack.header.core.*;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.ErrorCodeList;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.host.*;
import org.zstack.header.image.ImageInventory;
import org.zstack.header.image.ImageVO;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.message.MessageReply;
import org.zstack.header.message.OverlayMessage;
import org.zstack.header.storage.primary.*;
import org.zstack.header.storage.primary.VolumeSnapshotCapability.VolumeSnapshotArrangementType;
import org.zstack.header.storage.snapshot.*;
import org.zstack.header.vm.*;
import org.zstack.header.vo.ResourceVO;
import org.zstack.header.volume.*;
import org.zstack.storage.primary.PrimaryStorageBase;
import org.zstack.storage.primary.PrimaryStorageCapacityUpdater;
import org.zstack.storage.primary.PrimaryStoragePhysicalCapacityManager;
import org.zstack.storage.primary.local.APIGetLocalStorageHostDiskCapacityReply.HostDiskCapacity;
import org.zstack.storage.primary.local.MigrateBitsStruct.ResourceInfo;
import org.zstack.storage.volume.VolumeSystemTags;
import org.zstack.tag.SystemTagCreator;
import org.zstack.utils.CollectionDSL;
import org.zstack.utils.CollectionUtils;
import org.zstack.utils.DebugUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.function.Function;
import org.zstack.utils.gson.JSONObjectUtil;
import org.zstack.utils.logging.CLogger;
import javax.persistence.LockModeType;
import javax.persistence.Tuple;
import javax.persistence.TypedQuery;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import static org.zstack.core.Platform.*;
import static org.zstack.core.progress.ProgressReportService.createSubTaskProgress;
import static org.zstack.utils.CollectionDSL.*;
/**
* Created by frank on 6/30/2015.
*/
public class LocalStorageBase extends PrimaryStorageBase {
private static final CLogger logger = Utils.getLogger(LocalStorageBase.class);
@Autowired
private PluginRegistry pluginRgty;
@Autowired
protected PrimaryStorageOverProvisioningManager ratioMgr;
@Autowired
protected PrimaryStoragePhysicalCapacityManager physicalCapacityMgr;
@Autowired
private LocalStorageImageCleaner imageCacheCleaner;
@Autowired
private EventFacade eventf;
static class FactoryCluster {
LocalStorageHypervisorFactory factory;
List<ClusterInventory> clusters;
}
public LocalStorageBase() {
}
public LocalStorageBase(PrimaryStorageVO self) {
super(self);
}
@Override
public void detachHook(String clusterUuid, Completion completion) {
SimpleQuery<ClusterVO> q = dbf.createQuery(ClusterVO.class);
q.select(ClusterVO_.hypervisorType);
q.add(ClusterVO_.uuid, Op.EQ, clusterUuid);
String hvType = q.findValue();
LocalStorageHypervisorFactory f = getHypervisorBackendFactory(hvType);
final LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.detachHook(clusterUuid, new Completion(completion) {
@Override
public void success() {
syncPhysicalCapacity(new ReturnValueCompletion<PhysicalCapacityUsage>(null) {
@Override
public void success(PhysicalCapacityUsage returnValue) {
setCapacity(null, null, returnValue.totalPhysicalSize, returnValue.availablePhysicalSize);
}
@Override
public void fail(ErrorCode errorCode) {
logger.warn(String.format("failed to sync the physical capacity on the local primary storage[uuid:%s], %s",
self.getUuid(), errorCode));
}
});
completion.success();
}
@Override
public void fail(ErrorCode errorCode) {
completion.fail(errorCode);
}
});
}
@Override
public void handleApiMessage(APIMessage msg) {
if (msg instanceof APIGetLocalStorageHostDiskCapacityMsg) {
handle((APIGetLocalStorageHostDiskCapacityMsg) msg);
} else if (msg instanceof APILocalStorageMigrateVolumeMsg) {
handle((APILocalStorageMigrateVolumeMsg) msg);
} else if (msg instanceof APILocalStorageGetVolumeMigratableHostsMsg) {
handle((APILocalStorageGetVolumeMigratableHostsMsg) msg);
} else {
super.handleApiMessage(msg);
}
}
@Transactional(readOnly = true)
private void handle(APILocalStorageGetVolumeMigratableHostsMsg msg) {
// this API does the best it can to find migratable hosts.
// it doesn't count the base image size because the image may have
// been deleted, and ZStack has to consult the host for the image size
APILocalStorageGetVolumeMigratableReply reply = new APILocalStorageGetVolumeMigratableReply();
new SQLBatch() {
@Override
protected void scripts() {
                //1. compute the capacity required by the volume (ratio-adjusted) and its snapshots
long size = SQL.New("select vol.size" +
" from VolumeVO vol" +
" where vol.uuid = :uuid")
.param("uuid", msg.getVolumeUuid()).find();
size = ratioMgr.calculateByRatio(self.getUuid(), size);
Long snapshotSize = SQL.New("select sum(sp.size)" +
" from VolumeSnapshotVO sp" +
" where sp.volumeUuid = :volUuid")
.param("volUuid", msg.getVolumeUuid()).find();
if (snapshotSize != null) {
size += snapshotSize;
}
                //2. select hosts that have enough capacity
double physicalThreshold = physicalCapacityMgr.getRatio(self.getUuid());
List<String> hostUuids = SQL.New("select href.hostUuid" +
" from LocalStorageHostRefVO href" +
" where href.hostUuid !=" +
" (" +
" select rref.hostUuid" +
" from LocalStorageResourceRefVO rref" +
" where rref.resourceUuid = :volUuid" +
" and rref.resourceType = :rtype" +
" )" +
" and (href.totalPhysicalCapacity * (1.0 - :thres)) <= href.availablePhysicalCapacity" +
" and href.availablePhysicalCapacity != 0" +
" and href.availableCapacity >= :size" +
" and href.primaryStorageUuid = :psUuid" +
" group by href.hostUuid")
.param("volUuid", msg.getVolumeUuid())
.param("rtype", VolumeVO.class.getSimpleName())
.param("thres", physicalThreshold)
.param("size", size)
.param("psUuid", self.getUuid()).list();
                if (hostUuids.isEmpty()) {
                    reply.setInventories(new ArrayList<HostInventory>());
                    return;
                }
List<HostVO> hosts = new LinkedList<>(SQL.New("select h from HostVO h " +
" where h.uuid in (:uuids)" +
" and h.status = :hstatus")
.param("uuids", hostUuids)
.param("hstatus", HostStatus.Connected).list());
                //3. when migrating a root volume, check that the destination cluster provides the L2 networks the VM requires
boolean isRootVolume = Q.New(VolumeVO.class)
.eq(VolumeVO_.uuid, msg.getVolumeUuid())
.eq(VolumeVO_.type, VolumeType.Root)
.isExists();
if (isRootVolume) {
Tuple tuple = Q.New(VmInstanceVO.class)
.select(VmInstanceVO_.clusterUuid, VmInstanceVO_.uuid)
.eq(VmInstanceVO_.rootVolumeUuid, msg.getVolumeUuid()).findTuple();
String originClusterUuid = tuple.get(0, String.class);
String originVmUuid = tuple.get(1, String.class);
if (originClusterUuid == null) {
throw new ApiMessageInterceptionException(
err(SysErrors.INTERNAL, "The clusterUuid of vm cannot be null when migrate the vm"));
}
Iterator<HostVO> it = hosts.iterator();
while (it.hasNext()) {
HostVO hostVO = it.next();
String destClusterUuid = Q.New(HostVO.class).select(HostVO_.clusterUuid)
.eq(HostVO_.uuid, hostVO.getUuid()).findValue();
if (!originClusterUuid.equals(destClusterUuid)) {
List<String> originL2NetworkList = sql("select l2NetworkUuid from L3NetworkVO" +
" where uuid in(select l3NetworkUuid from VmNicVO where vmInstanceUuid = :vmUuid)")
.param("vmUuid", originVmUuid).list();
List<String> l2NetworkList = sql("select l2NetworkUuid from L2NetworkClusterRefVO" +
" where clusterUuid = :clusterUuid")
.param("clusterUuid", destClusterUuid).list();
for (String l2 : originL2NetworkList) {
if (!l2NetworkList.contains(l2)) {
//remove inappropriate host from list
it.remove();
break;
}
}
}
}
}
List<VolumeMigrationTargetHostFilter> exts = pluginRgty.getExtensionList(VolumeMigrationTargetHostFilter.class);
for (VolumeMigrationTargetHostFilter hostFilter : exts) {
hosts = hostFilter.filter(hosts);
}
reply.setInventories(HostInventory.valueOf(hosts));
}
}.execute();
bus.reply(msg, reply);
}
private void handle(final APILocalStorageMigrateVolumeMsg msg) {
final APILocalStorageMigrateVolumeEvent evt = new APILocalStorageMigrateVolumeEvent(msg.getId());
if (self.getState() == PrimaryStorageState.Disabled) {
evt.setError(operr("The primary storage[uuid:%s] is disabled cold migrate is not allowed", msg.getPrimaryStorageUuid()));
bus.publish(evt);
return;
}
class MigrateStruct {
private boolean isRootVolume = false;
private OverlayMessage message;
private String vmUuid;
private boolean volumeStatusChanged = false;
private boolean vmStateChanged = false;
private String vmOriginStateDrivenEvt;
public OverlayMessage getMessage() {
return message;
}
public void setMessage(OverlayMessage message) {
this.message = message;
}
public String getVmUuid() {
return vmUuid;
}
public void setVmUuid(String vmUuid) {
this.vmUuid = vmUuid;
}
public boolean isVolumeStatusChanged() {
return volumeStatusChanged;
}
public void setVolumeStatusChanged(boolean volumeStatusChanged) {
this.volumeStatusChanged = volumeStatusChanged;
}
public boolean isVmStateChanged() {
return vmStateChanged;
}
public void setVmStateChanged(boolean vmStateChanged) {
this.vmStateChanged = vmStateChanged;
}
}
MigrateStruct struct = new MigrateStruct();
VolumeStatus originStatus = Q.New(VolumeVO.class).select(VolumeVO_.status).eq(VolumeVO_.uuid, msg.getVolumeUuid()).findValue();
FlowChain chain = new SimpleFlowChain();
chain.setName(String.format("local-storage-%s-migrate-volume-%s-to-host-%s", msg.getPrimaryStorageUuid(), msg.getVolumeUuid(), msg.getDestHostUuid()));
chain.then(new Flow() {
String __name__ = "change-volume-status-to-migrating";
@Override
public void run(FlowTrigger trigger, Map data) {
ChangeVolumeStatusMsg changeVolumeStatusMsg = new ChangeVolumeStatusMsg();
changeVolumeStatusMsg.setStatus(VolumeStatus.Migrating);
changeVolumeStatusMsg.setVolumeUuid(msg.getVolumeUuid());
bus.makeTargetServiceIdByResourceUuid(changeVolumeStatusMsg, VolumeConstant.SERVICE_ID, msg.getVolumeUuid());
bus.send(changeVolumeStatusMsg, new CloudBusCallBack(changeVolumeStatusMsg) {
@Override
public void run(MessageReply reply) {
if (!reply.isSuccess()) {
trigger.fail(reply.getError());
return;
}
struct.setVolumeStatusChanged(true);
trigger.next();
}
});
}
@Override
public void rollback(FlowRollback trigger, Map data) {
if (struct.isVolumeStatusChanged()) {
ChangeVolumeStatusMsg rollbackMsg = new ChangeVolumeStatusMsg();
rollbackMsg.setStatus(originStatus);
rollbackMsg.setVolumeUuid(msg.getVolumeUuid());
bus.makeTargetServiceIdByResourceUuid(rollbackMsg, VolumeConstant.SERVICE_ID, msg.getVolumeUuid());
bus.send(rollbackMsg, new CloudBusCallBack(trigger) {
@Override
public void run(MessageReply reply) {
trigger.rollback();
}
});
} else {
trigger.rollback();
}
}
}).then(new Flow() {
String __name__ = "change-vm-state-to-volume-migrating";
@Override
public boolean skip(Map data) {
Tuple t = Q.New(VmInstanceVO.class).select(VmInstanceVO_.uuid, VmInstanceVO_.state)
.eq(VmInstanceVO_.rootVolumeUuid, msg.getVolumeUuid())
.findTuple();
struct.vmUuid = t == null ? null : t.get(0, String.class);
struct.vmOriginStateDrivenEvt = t == null ? null : t.get(1, VmInstanceState.class).getDrivenEvent().toString();
struct.isRootVolume = struct.vmUuid != null;
return !struct.isRootVolume;
}
@Override
public void run(FlowTrigger trigger, Map data) {
ChangeVmStateMsg cmsg = new ChangeVmStateMsg();
cmsg.setStateEvent(VmInstanceStateEvent.volumeMigrating.toString());
cmsg.setVmInstanceUuid(struct.getVmUuid());
bus.makeTargetServiceIdByResourceUuid(cmsg, VmInstanceConstant.SERVICE_ID, struct.getVmUuid());
bus.send(cmsg, new CloudBusCallBack(cmsg) {
@Override
public void run(MessageReply reply) {
if (!reply.isSuccess()) {
trigger.fail(reply.getError());
return;
}
struct.setVmStateChanged(true);
trigger.next();
}
});
}
@Override
public void rollback(FlowRollback trigger, Map data) {
if (struct.isVmStateChanged()) {
ChangeVmStateMsg rollbackMsg = new ChangeVmStateMsg();
rollbackMsg.setStateEvent(struct.vmOriginStateDrivenEvt);
rollbackMsg.setVmInstanceUuid(struct.getVmUuid());
bus.makeTargetServiceIdByResourceUuid(rollbackMsg, VmInstanceConstant.SERVICE_ID, struct.getVmUuid());
bus.send(rollbackMsg, new CloudBusCallBack(trigger) {
@Override
public void run(MessageReply reply) {
trigger.rollback();
}
});
} else {
trigger.rollback();
}
}
}).then(new NoRollbackFlow() {
String __name__ = "migrate-volume-on-local-storage";
@Override
public void run(FlowTrigger trigger, Map data) {
MigrateVolumeOnLocalStorageMsg mmsg = new MigrateVolumeOnLocalStorageMsg();
mmsg.setPrimaryStorageUuid(msg.getPrimaryStorageUuid());
mmsg.setDestHostUuid(msg.getDestHostUuid());
mmsg.setVolumeUuid(msg.getVolumeUuid());
bus.makeTargetServiceIdByResourceUuid(mmsg, PrimaryStorageConstant.SERVICE_ID, self.getUuid());
MigrateVolumeOverlayMsg omsg = new MigrateVolumeOverlayMsg();
omsg.setMessage(mmsg);
omsg.setVolumeUuid(msg.getVolumeUuid());
bus.makeTargetServiceIdByResourceUuid(omsg, VolumeConstant.SERVICE_ID, msg.getVolumeUuid());
struct.setMessage(omsg);
if (struct.isRootVolume) {
MigrateRootVolumeVmOverlayMsg vmsg = new MigrateRootVolumeVmOverlayMsg();
vmsg.setMessage(omsg);
vmsg.setVmInstanceUuid(struct.getVmUuid());
bus.makeTargetServiceIdByResourceUuid(vmsg, VmInstanceConstant.SERVICE_ID, struct.getVmUuid());
struct.setMessage(vmsg);
}
bus.send(struct.getMessage(), new CloudBusCallBack(struct.getMessage()) {
@Override
public void run(MessageReply reply) {
if (!reply.isSuccess()) {
trigger.fail(reply.getError());
return;
}
MigrateVolumeOnLocalStorageReply mr = reply.castReply();
evt.setInventory(mr.getInventory());
trigger.next();
}
});
}
}).then(new NoRollbackFlow() {
String __name__ = "change-vm-state-to-volume-migrated";
@Override
public boolean skip(Map data) {
return !struct.isRootVolume;
}
@Override
public void run(FlowTrigger trigger, Map data) {
ChangeVmStateMsg cmsg = new ChangeVmStateMsg();
cmsg.setStateEvent(VmInstanceStateEvent.volumeMigrated.toString());
cmsg.setVmInstanceUuid(struct.getVmUuid());
bus.makeTargetServiceIdByResourceUuid(cmsg, VmInstanceConstant.SERVICE_ID, struct.getVmUuid());
                // if this fails, the host ping task will sync the VM state
bus.send(cmsg, new CloudBusCallBack(cmsg) {
@Override
public void run(MessageReply reply) {
                        // the VM state sync will set it back if needed, so just proceed
trigger.next();
}
});
}
}).then(new NoRollbackFlow() {
String __name__ = "change-volume-status-to-origin";
@Override
public void run(FlowTrigger trigger, Map data) {
ChangeVolumeStatusMsg changeVolumeStatusMsg = new ChangeVolumeStatusMsg();
changeVolumeStatusMsg.setStatus(originStatus);
changeVolumeStatusMsg.setVolumeUuid(msg.getVolumeUuid());
bus.makeTargetServiceIdByResourceUuid(changeVolumeStatusMsg, VolumeConstant.SERVICE_ID, msg.getVolumeUuid());
bus.send(changeVolumeStatusMsg, new CloudBusCallBack(changeVolumeStatusMsg) {
@Override
public void run(MessageReply reply) {
if (!reply.isSuccess()) {
trigger.fail(reply.getError());
return;
}
trigger.next();
}
});
}
}).done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
/* update vm last host uuid */
SQL.New(VmInstanceVO.class)
.eq(VmInstanceVO_.uuid, struct.getVmUuid())
.set(VmInstanceVO_.lastHostUuid, msg.getDestHostUuid())
.update();
bus.publish(evt);
}
}).error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
evt.setError(errCode);
bus.publish(evt);
}
}).start();
}
private void handle(final MigrateVolumeOnLocalStorageMsg msg) {
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return String.format("migrate-volume-%s", msg.getVolumeUuid());
}
@Override
public void run(SyncTaskChain chain) {
migrateVolume(msg, new NoErrorCompletion(msg, chain) {
@Override
public void done() {
chain.next();
}
});
}
@Override
public String getName() {
return getSyncSignature();
}
});
}
private void migrateVolume(MigrateVolumeOnLocalStorageMsg msg, NoErrorCompletion completion) {
MigrateVolumeOnLocalStorageReply reply = new MigrateVolumeOnLocalStorageReply();
SimpleQuery<LocalStorageResourceRefVO> refq = dbf.createQuery(LocalStorageResourceRefVO.class);
refq.add(LocalStorageResourceRefVO_.resourceUuid, Op.EQ, msg.getVolumeUuid());
refq.add(LocalStorageResourceRefVO_.resourceType, Op.EQ, VolumeVO.class.getSimpleName());
LocalStorageResourceRefVO ref = refq.find();
if (ref == null) {
reply.setError(operr("volume[uuid:%s] is not on the local storage anymore," +
"it may have been deleted", msg.getVolumeUuid()));
bus.reply(msg, reply);
completion.done();
return;
}
if (ref.getHostUuid().equals(msg.getDestHostUuid())) {
logger.debug(String.format("the volume[uuid:%s] is already on the host[uuid:%s], no need to migrate",
msg.getVolumeUuid(), msg.getDestHostUuid()));
bus.reply(msg, reply);
completion.done();
return;
}
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("migrate-volume-%s-local-storage-%s-to-host-%s",
msg.getVolumeUuid(), msg.getPrimaryStorageUuid(), msg.getDestHostUuid()));
chain.then(new ShareFlow() {
LocalStorageResourceRefVO volumeRefVO;
List<LocalStorageResourceRefVO> snapshotRefVOS;
LocalStorageResourceRefInventory ref;
long requiredSize;
List<VolumeSnapshotVO> snapshots;
VolumeVO volume;
MigrateBitsStruct struct = new MigrateBitsStruct();
LocalStorageHypervisorBackend bkd;
VolumeStatus originVolumeStatus;
{
SimpleQuery<LocalStorageResourceRefVO> q = dbf.createQuery(LocalStorageResourceRefVO.class);
q.add(LocalStorageResourceRefVO_.resourceType, Op.EQ, VolumeVO.class.getSimpleName());
q.add(LocalStorageResourceRefVO_.resourceUuid, Op.EQ, msg.getVolumeUuid());
volumeRefVO = q.find();
ref = LocalStorageResourceRefInventory.valueOf(volumeRefVO);
SimpleQuery<VolumeSnapshotVO> sq = dbf.createQuery(VolumeSnapshotVO.class);
sq.add(VolumeSnapshotVO_.volumeUuid, Op.EQ, ref.getResourceUuid());
snapshots = sq.list();
volume = dbf.findByUuid(ref.getResourceUuid(), VolumeVO.class);
requiredSize = ratioMgr.calculateByRatio(self.getUuid(), ref.getSize());
ResourceInfo info = new ResourceInfo();
info.setResourceRef(ref);
info.setPath(volume.getInstallPath());
struct.getInfos().add(info);
struct.setDestHostUuid(msg.getDestHostUuid());
struct.setSrcHostUuid(ref.getHostUuid());
struct.setVolume(VolumeInventory.valueOf(volume));
if (!snapshots.isEmpty()) {
List<String> spUuids = CollectionUtils.transformToList(snapshots, ResourceVO::getUuid);
SimpleQuery<LocalStorageResourceRefVO> rq = dbf.createQuery(LocalStorageResourceRefVO.class);
rq.add(LocalStorageResourceRefVO_.resourceType, Op.EQ, VolumeSnapshotVO.class.getSimpleName());
rq.add(LocalStorageResourceRefVO_.resourceUuid, Op.IN, spUuids);
snapshotRefVOS = rq.list();
for (final VolumeSnapshotVO vo : snapshots) {
info = new ResourceInfo();
info.setPath(vo.getPrimaryStorageInstallPath());
info.setResourceRef(CollectionUtils.find(snapshotRefVOS, new Function<LocalStorageResourceRefInventory, LocalStorageResourceRefVO>() {
@Override
public LocalStorageResourceRefInventory call(LocalStorageResourceRefVO arg) {
return arg.getResourceUuid().equals(vo.getUuid()) ? LocalStorageResourceRefInventory.valueOf(arg) : null;
}
}));
if (info.getResourceRef() == null) {
throw new CloudRuntimeException(
String.format("cannot find reference of snapshot[uuid:%s, name:%s] on the local storage[uuid:%s, name:%s]",
vo.getUuid(), vo.getName(), self.getUuid(), self.getName()));
}
struct.getInfos().add(info);
requiredSize += vo.getSize();
}
}
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(msg.getDestHostUuid());
bkd = f.getHypervisorBackend(self);
originVolumeStatus = volume.getStatus();
volume.setStatus(VolumeStatus.Migrating);
SQL.New(VolumeVO.class).set(VolumeVO_.status, VolumeStatus.Migrating).eq(VolumeVO_.uuid, volume.getUuid()).update();
}
@Override
public void setup() {
flow(new Flow() {
String __name__ = "reserve-capacity-on-dest-host";
boolean success = false;
@Override
public void run(FlowTrigger trigger, Map data) {
reserveCapacityOnHost(msg.getDestHostUuid(), requiredSize, self.getUuid());
success = true;
trigger.next();
}
@Override
public void rollback(FlowRollback trigger, Map data) {
if (success) {
returnStorageCapacityToHost(msg.getDestHostUuid(), requiredSize);
}
trigger.rollback();
}
});
List<Flow> flows = bkd.createMigrateBitsVolumeFlow(struct);
for (Flow fl : flows) {
flow(fl);
}
flow(new NoRollbackFlow() {
String __name__ = "change-reference-to-dst-host";
@Override
public void run(FlowTrigger trigger, Map data) {
List<String> resourceUuids = new ArrayList<>();
resourceUuids.add(volumeRefVO.getResourceUuid());
if (snapshotRefVOS != null) {
for (LocalStorageResourceRefVO r : snapshotRefVOS) {
resourceUuids.add(r.getResourceUuid());
}
}
UpdateQuery.New(LocalStorageResourceRefVO.class)
.set(LocalStorageResourceRefVO_.hostUuid, msg.getDestHostUuid())
.condAnd(LocalStorageResourceRefVO_.resourceUuid, Op.IN, resourceUuids)
.update();
trigger.next();
}
});
flow(new NoRollbackFlow() {
String __name__ = "return-capacity-to-src-host";
@Override
public void run(FlowTrigger trigger, Map data) {
returnStorageCapacityToHost(ref.getHostUuid(), requiredSize);
trigger.next();
}
});
flow(new NoRollbackFlow() {
String __name__ = "delete-bits-on-the-src-host";
@Override
public void run(FlowTrigger trigger, Map data) {
List<String> paths = new ArrayList<>();
paths.add(volume.getInstallPath());
for (VolumeSnapshotVO sp : snapshots) {
paths.add(sp.getPrimaryStorageInstallPath());
}
final Iterator<String> it = paths.iterator();
new Runnable() {
@Override
@AsyncThread
public void run() {
if (!it.hasNext()) {
return;
}
final String path = it.next();
bkd.deleteBits(path, struct.getSrcHostUuid(), new Completion(trigger) {
@Override
public void success() {
run();
}
@Override
public void fail(ErrorCode errorCode) {
//TODO GC
logger.warn(String.format("failed to delete %s on the host[uuid:%s], %s",
path, struct.getSrcHostUuid(), errorCode));
run();
}
});
}
}.run();
trigger.next();
}
});
done(new FlowDoneHandler(msg, completion) {
@Override
public void handle(Map data) {
new SQLBatch() {
//migrate the root volume: the VM's clusterUuid needs updating if the destination host is in another cluster
@Override
protected void scripts() {
Boolean isRootVolume = (Q.New(VolumeVO.class).select(VolumeVO_.type)
.eq(VolumeVO_.uuid, volumeRefVO.getResourceUuid())
.findValue() == VolumeType.Root);
if (isRootVolume) {
Tuple tuple = Q.New(VmInstanceVO.class)
.select(VmInstanceVO_.clusterUuid, VmInstanceVO_.uuid)
.eq(VmInstanceVO_.rootVolumeUuid, volumeRefVO.getResourceUuid()).findTuple();
String originClusterUuid = tuple.get(0, String.class);
String vmUuid = tuple.get(1, String.class);
String clusterUuid = Q.New(HostVO.class).select(HostVO_.clusterUuid)
.eq(HostVO_.uuid, msg.getDestHostUuid()).findValue();
if (!originClusterUuid.equals(clusterUuid)) {
sql("update VmInstanceEO" +
" set clusterUuid = :clusterUuid" +
" where uuid = :vmUuid")
.param("clusterUuid", clusterUuid)
.param("vmUuid", vmUuid).execute();
}
}
sql(VolumeVO.class)
.eq(VolumeVO_.uuid, volumeRefVO.getResourceUuid())
.set(VolumeVO_.status, originVolumeStatus)
.update();
LocalStorageResourceRefVO vo = Q.New(LocalStorageResourceRefVO.class)
.eq(LocalStorageResourceRefVO_.resourceUuid, volumeRefVO.getResourceUuid())
.eq(LocalStorageResourceRefVO_.primaryStorageUuid, volumeRefVO.getPrimaryStorageUuid())
.eq(LocalStorageResourceRefVO_.hostUuid, msg.getDestHostUuid())
.find();
reply.setInventory(LocalStorageResourceRefInventory.valueOf(vo));
}
}.execute();
bus.reply(msg, reply);
}
});
error(new FlowErrorHandler(msg, completion) {
@Override
public void handle(ErrorCode errCode, Map data) {
reply.setError(errCode);
bus.reply(msg, reply);
}
});
Finally(new FlowFinallyHandler(msg, completion) {
@Override
public void Finally() {
completion.done();
}
});
}
}).start();
}
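// dispatch local (in-process) messages to their specific handlers; anything
// unrecognized falls through to the parent class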
@Override
public void handleLocalMessage(Message msg) {
if (msg instanceof InitPrimaryStorageOnHostConnectedMsg) {
handle((InitPrimaryStorageOnHostConnectedMsg) msg);
} else if (msg instanceof RemoveHostFromLocalStorageMsg) {
handle((RemoveHostFromLocalStorageMsg) msg);
} else if (msg instanceof TakeSnapshotMsg) {
handle((TakeSnapshotMsg) msg);
} else if (msg instanceof CheckSnapshotMsg) {
handle((CheckSnapshotMsg) msg);
} else if (msg instanceof BackupVolumeSnapshotFromPrimaryStorageToBackupStorageMsg) {
handle((BackupVolumeSnapshotFromPrimaryStorageToBackupStorageMsg) msg);
} else if (msg instanceof CreateVolumeFromVolumeSnapshotOnPrimaryStorageMsg) {
handle((CreateVolumeFromVolumeSnapshotOnPrimaryStorageMsg) msg);
} else if (msg instanceof DownloadImageToPrimaryStorageCacheMsg) {
handle((DownloadImageToPrimaryStorageCacheMsg) msg);
} else if (msg instanceof LocalStorageCreateEmptyVolumeMsg) {
handle((LocalStorageCreateEmptyVolumeMsg) msg);
} else if (msg instanceof LocalStorageDirectlyDeleteBitsMsg) {
handle((LocalStorageDirectlyDeleteBitsMsg) msg);
} else if (msg instanceof LocalStorageReserveHostCapacityMsg) {
handle((LocalStorageReserveHostCapacityMsg) msg);
} else if (msg instanceof LocalStorageReturnHostCapacityMsg) {
handle((LocalStorageReturnHostCapacityMsg) msg);
} else if (msg instanceof LocalStorageHypervisorSpecificMessage) {
handle((LocalStorageHypervisorSpecificMessage) msg);
} else if (msg instanceof CreateTemporaryVolumeFromSnapshotMsg) {
handle((CreateTemporaryVolumeFromSnapshotMsg) msg);
} else if (msg instanceof UploadBitsFromLocalStorageToBackupStorageMsg) {
handle((UploadBitsFromLocalStorageToBackupStorageMsg) msg);
} else if (msg instanceof GetVolumeRootImageUuidFromPrimaryStorageMsg) {
handle((GetVolumeRootImageUuidFromPrimaryStorageMsg) msg);
} else if (msg instanceof LocalStorageDeleteImageCacheOnPrimaryStorageMsg) {
handle((LocalStorageDeleteImageCacheOnPrimaryStorageMsg) msg);
} else if (msg instanceof MigrateVolumeOnLocalStorageMsg) {
handle((MigrateVolumeOnLocalStorageMsg) msg);
} else if (msg instanceof DownloadBitsFromKVMHostToPrimaryStorageMsg) {
handle((DownloadBitsFromKVMHostToPrimaryStorageMsg) msg);
} else if (msg instanceof CancelDownloadBitsFromKVMHostToPrimaryStorageMsg) {
handle((CancelDownloadBitsFromKVMHostToPrimaryStorageMsg) msg);
} else if ((msg instanceof GetDownloadBitsFromKVMHostProgressMsg)) {
handle((GetDownloadBitsFromKVMHostProgressMsg) msg);
} else if ((msg instanceof LocalStorageRecalculateCapacityMsg)) {
handle((LocalStorageRecalculateCapacityMsg) msg);
} else {
super.handleLocalMessage(msg);
}
}
private void handle(DownloadBitsFromKVMHostToPrimaryStorageMsg msg) {
LocalStorageHypervisorBackend bkd = getHypervisorBackendFactoryByHostUuid(msg.getSrcHostUuid()).getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<DownloadBitsFromKVMHostToPrimaryStorageReply>(msg) {
@Override
public void success(DownloadBitsFromKVMHostToPrimaryStorageReply reply) {
bus.reply(msg, reply);
}
@Override
public void fail(ErrorCode errorCode) {
DownloadBitsFromKVMHostToPrimaryStorageReply reply = new DownloadBitsFromKVMHostToPrimaryStorageReply();
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
private void handle(GetDownloadBitsFromKVMHostProgressMsg msg) {
LocalStorageHypervisorBackend bkd = getHypervisorBackendFactoryByHostUuid(msg.getHostUuid()).getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<GetDownloadBitsFromKVMHostProgressReply>(msg) {
@Override
public void success(GetDownloadBitsFromKVMHostProgressReply reply) {
bus.reply(msg, reply);
}
@Override
public void fail(ErrorCode errorCode) {
GetDownloadBitsFromKVMHostProgressReply reply = new GetDownloadBitsFromKVMHostProgressReply();
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
private void handle(CancelDownloadBitsFromKVMHostToPrimaryStorageMsg msg) {
LocalStorageHypervisorBackend bkd = getHypervisorBackendFactoryByHostUuid(msg.getDestHostUuid()).getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<CancelDownloadBitsFromKVMHostToPrimaryStorageReply>(msg) {
@Override
public void success(CancelDownloadBitsFromKVMHostToPrimaryStorageReply reply) {
bus.reply(msg, reply);
}
@Override
public void fail(ErrorCode errorCode) {
CancelDownloadBitsFromKVMHostToPrimaryStorageReply reply = new CancelDownloadBitsFromKVMHostToPrimaryStorageReply();
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
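// trigger an image cache cleanup for this primary storage; force mode is taken
// from the API message, and the API event is published right after the cleanup
// is kicked off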
@Override
protected void handle(APICleanUpImageCacheOnPrimaryStorageMsg msg) {
APICleanUpImageCacheOnPrimaryStorageEvent evt = new APICleanUpImageCacheOnPrimaryStorageEvent(msg.getId());
imageCacheCleaner.setForce(msg.isForce());
imageCacheCleaner.cleanup(msg.getUuid(), false);
bus.publish(evt);
}
private void handle(final LocalStorageDeleteImageCacheOnPrimaryStorageMsg msg) {
LocalStorageHypervisorBackend bkd = getHypervisorBackendFactoryByHostUuid(msg.getHostUuid()).getHypervisorBackend(self);
bkd.handle(msg, msg.getHostUuid(), new ReturnValueCompletion<DeleteImageCacheOnPrimaryStorageReply>(msg) {
@Override
public void success(DeleteImageCacheOnPrimaryStorageReply reply) {
bus.reply(msg, reply);
}
@Override
public void fail(ErrorCode errorCode) {
DeleteImageCacheOnPrimaryStorageReply reply = new DeleteImageCacheOnPrimaryStorageReply();
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
private void handle(final GetVolumeRootImageUuidFromPrimaryStorageMsg msg) {
String hostUuid = getHostUuidByResourceUuid(msg.getVolume().getUuid());
LocalStorageHypervisorBackend bkd = getHypervisorBackendFactoryByHostUuid(hostUuid).getHypervisorBackend(self);
bkd.handle(msg, hostUuid, new ReturnValueCompletion<GetVolumeRootImageUuidFromPrimaryStorageReply>(msg) {
@Override
public void success(GetVolumeRootImageUuidFromPrimaryStorageReply reply) {
bus.reply(msg, reply);
}
@Override
public void fail(ErrorCode errorCode) {
GetVolumeRootImageUuidFromPrimaryStorageReply reply = new GetVolumeRootImageUuidFromPrimaryStorageReply();
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
private void handle(final UploadBitsFromLocalStorageToBackupStorageMsg msg) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(msg.getHostUuid());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, msg.getHostUuid(), new ReturnValueCompletion<UploadBitsFromLocalStorageToBackupStorageReply>(msg) {
@Override
public void success(UploadBitsFromLocalStorageToBackupStorageReply returnValue) {
bus.reply(msg, returnValue);
}
@Override
public void fail(ErrorCode errorCode) {
UploadBitsFromLocalStorageToBackupStorageReply reply = new UploadBitsFromLocalStorageToBackupStorageReply();
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
private void handle(final CreateTemporaryVolumeFromSnapshotMsg msg) {
String hostUuid = getHostUuidByResourceUuid(msg.getSnapshot().getUuid());
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(hostUuid);
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, hostUuid, new ReturnValueCompletion<CreateTemporaryVolumeFromSnapshotReply>(msg) {
@Override
public void success(CreateTemporaryVolumeFromSnapshotReply returnValue) {
bus.reply(msg, returnValue);
}
@Override
public void fail(ErrorCode errorCode) {
CreateTemporaryVolumeFromSnapshotReply reply = new CreateTemporaryVolumeFromSnapshotReply();
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
private void handle(LocalStorageHypervisorSpecificMessage msg) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(msg.getHostUuid());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handleHypervisorSpecificMessage(msg);
}
private void handle(LocalStorageReturnHostCapacityMsg msg) {
LocalStorageReturnHostCapacityReply reply = new LocalStorageReturnHostCapacityReply();
long size = msg.isNoOverProvisioning() ? msg.getSize() : ratioMgr.calculateByRatio(self.getUuid(), msg.getSize());
returnStorageCapacityToHost(msg.getHostUuid(), size);
bus.reply(msg, reply);
}
private void handle(LocalStorageReserveHostCapacityMsg msg) {
LocalStorageReserveHostCapacityReply reply = new LocalStorageReserveHostCapacityReply();
long size = msg.isNoOverProvisioning() ? msg.getSize() : ratioMgr.calculateByRatio(self.getUuid(), msg.getSize());
reserveCapacityOnHost(msg.getHostUuid(), size, self.getUuid());
bus.reply(msg, reply);
}
private void handle(final LocalStorageDirectlyDeleteBitsMsg msg) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(msg.getHostUuid());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, msg.getHostUuid(), new ReturnValueCompletion<LocalStorageDirectlyDeleteBitsReply>(msg) {
@Override
public void success(LocalStorageDirectlyDeleteBitsReply reply) {
bus.reply(msg, reply);
}
@Override
public void fail(ErrorCode errorCode) {
LocalStorageDirectlyDeleteBitsReply reply = new LocalStorageDirectlyDeleteBitsReply();
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
private void handle(final LocalStorageCreateEmptyVolumeMsg msg) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(msg.getHostUuid());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<LocalStorageCreateEmptyVolumeReply>(msg) {
@Override
public void success(LocalStorageCreateEmptyVolumeReply returnValue) {
bus.reply(msg, returnValue);
}
@Override
public void fail(ErrorCode errorCode) {
LocalStorageCreateEmptyVolumeReply reply = new LocalStorageCreateEmptyVolumeReply();
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
private void handle(APIGetLocalStorageHostDiskCapacityMsg msg) {
APIGetLocalStorageHostDiskCapacityReply reply = new APIGetLocalStorageHostDiskCapacityReply();
if (msg.getHostUuid() != null) {
SimpleQuery<LocalStorageHostRefVO> q = dbf.createQuery(LocalStorageHostRefVO.class);
q.add(LocalStorageHostRefVO_.primaryStorageUuid, Op.EQ, msg.getPrimaryStorageUuid());
q.add(LocalStorageHostRefVO_.hostUuid, Op.EQ, msg.getHostUuid());
LocalStorageHostRefVO ref = q.find();
long total = 0;
long available = 0;
long availablePhy = 0;
long totalPhy = 0;
if (ref == null) {
HostStatus status = Q.New(HostVO.class).select(HostVO_.status)
.eq(HostVO_.uuid, msg.getHostUuid()).findValue();
if (status == HostStatus.Connected) {
reply.setError(err(SysErrors.RESOURCE_NOT_FOUND,
"local primary storage[uuid:%s] doesn't have the host[uuid:%s]",
self.getUuid(), msg.getHostUuid()));
bus.reply(msg, reply);
return;
}
} else {
total = ref.getTotalCapacity();
available = ref.getAvailableCapacity();
availablePhy = ref.getAvailablePhysicalCapacity();
totalPhy = ref.getTotalPhysicalCapacity();
}
HostDiskCapacity c = new HostDiskCapacity();
c.setHostUuid(msg.getHostUuid());
c.setTotalCapacity(total);
c.setAvailableCapacity(available);
c.setAvailablePhysicalCapacity(availablePhy);
c.setTotalPhysicalCapacity(totalPhy);
reply.setInventories(list(c));
} else {
SimpleQuery<LocalStorageHostRefVO> q = dbf.createQuery(LocalStorageHostRefVO.class);
q.add(LocalStorageHostRefVO_.primaryStorageUuid, Op.EQ, msg.getPrimaryStorageUuid());
List<LocalStorageHostRefVO> refs = q.list();
List<HostDiskCapacity> cs = CollectionUtils.transformToList(refs, new Function<HostDiskCapacity, LocalStorageHostRefVO>() {
@Override
public HostDiskCapacity call(LocalStorageHostRefVO ref) {
HostDiskCapacity c = new HostDiskCapacity();
c.setHostUuid(ref.getHostUuid());
c.setTotalCapacity(ref.getTotalCapacity());
c.setAvailableCapacity(ref.getAvailableCapacity());
c.setAvailablePhysicalCapacity(ref.getAvailablePhysicalCapacity());
c.setTotalPhysicalCapacity(ref.getTotalPhysicalCapacity());
return c;
}
});
reply.setInventories(cs);
}
bus.reply(msg, reply);
}
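// distribute an image to the cache of either the specified host or every connected
// host with enough available physical capacity; an AsyncLatch waits for all hosts,
// and the reply fails only if the download failed on every one of them, otherwise
// per-host failures are just logged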
private void handle(final DownloadImageToPrimaryStorageCacheMsg msg) {
ImageInventory imageInventory = msg.getImage();
// if the image's actualSize is null, allow distributing the image by default
long imageActualSize = imageInventory.getActualSize() != null ? imageInventory.getActualSize() : 0;
final DownloadImageToPrimaryStorageCacheReply reply = new DownloadImageToPrimaryStorageCacheReply();
final List<String> hostUuids;
if (msg.getHostUuid() == null) {
hostUuids = new Callable<List<String>>() {
@Override
@Transactional(readOnly = true)
public List<String> call() {
String sql = "select h.hostUuid" +
" from LocalStorageHostRefVO h, HostVO host" +
" where h.primaryStorageUuid = :puuid" +
" and h.hostUuid = host.uuid" +
" and host.status = :hstatus" +
" and h.availablePhysicalCapacity >= :availablePhysicalCapacity";
TypedQuery<String> q = dbf.getEntityManager().createQuery(sql, String.class);
q.setParameter("puuid", self.getUuid());
q.setParameter("hstatus", HostStatus.Connected);
q.setParameter("availablePhysicalCapacity", imageActualSize);
return q.getResultList();
}
}.call();
} else {
hostUuids = list(msg.getHostUuid());
}
if (hostUuids.isEmpty()) {
bus.reply(msg, reply);
return;
}
class HostError {
ErrorCode errorCode;
String hostUuid;
}
class Ret {
List<HostError> errorCodes = new ArrayList<HostError>();
String installPath;
synchronized void addError(HostError err) {
errorCodes.add(err);
}
}
final Ret ret = new Ret();
final AsyncLatch latch = new AsyncLatch(hostUuids.size(), new NoErrorCompletion(msg) {
@Override
public void done() {
if (ret.errorCodes.size() == hostUuids.size()) {
reply.setError(operr("failed to download image[uuid:%s] to all hosts in the local storage[uuid:%s]" +
". %s", msg.getImage().getUuid(), self.getUuid(), JSONObjectUtil.toJsonString(ret.errorCodes)));
} else if (!ret.errorCodes.isEmpty()) {
for (HostError err : ret.errorCodes) {
logger.warn(String.format("failed to download image [uuid:%s] to the host[uuid:%s] in the local" +
" storage[uuid:%s]. %s", msg.getImage().getUuid(), err.hostUuid, self.getUuid(), err.errorCode));
}
}
reply.setInstallPath(ret.installPath);
bus.reply(msg, reply);
}
});
for (final String hostUuid : hostUuids) {
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("download-image-%s-to-local-storage-%s-host-%s", msg.getImage().getUuid(), self.getUuid(), hostUuid));
chain.then(new ShareFlow() {
@Override
public void setup() {
flow(new NoRollbackFlow() {
String __name__ = "download-to-host";
@Override
public void run(final FlowTrigger trigger, Map data) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(hostUuid);
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.downloadImageToCache(msg.getImage(), hostUuid, new ReturnValueCompletion<String>(trigger) {
@Override
public void success(String returnValue) {
ret.installPath = returnValue;
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
done(new FlowDoneHandler(latch) {
@Override
public void handle(Map data) {
latch.ack();
}
});
error(new FlowErrorHandler(latch) {
@Override
public void handle(ErrorCode errCode, Map data) {
HostError herr = new HostError();
herr.errorCode = errCode;
herr.hostUuid = hostUuid;
ret.addError(herr);
latch.ack();
}
});
}
}).start();
}
}
@Override
protected void handle(final MergeVolumeSnapshotOnPrimaryStorageMsg msg) {
final String hostUuid = getHostUuidByResourceUuid(msg.getTo().getUuid());
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(hostUuid);
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, hostUuid, new ReturnValueCompletion<MergeVolumeSnapshotOnPrimaryStorageReply>(msg) {
@Override
public void success(MergeVolumeSnapshotOnPrimaryStorageReply returnValue) {
bus.reply(msg, returnValue);
}
@Override
public void fail(ErrorCode errorCode) {
MergeVolumeSnapshotOnPrimaryStorageReply r = new MergeVolumeSnapshotOnPrimaryStorageReply();
r.setError(errorCode);
bus.reply(msg, r);
}
});
}
private void handle(final CreateVolumeFromVolumeSnapshotOnPrimaryStorageMsg msg) {
final VolumeSnapshotInventory sinv = msg.getSnapshot();
final String hostUuid = getHostUuidByResourceUuid(sinv.getUuid());
if (hostUuid == null) {
throw new OperationFailureException(inerr(
"the volume snapshot[uuid:%s] is not on the local primary storage[uuid: %s]; the local primary storage" +
" doesn't support the manner of downloading snapshots and creating the volume", sinv.getUuid(), self.getUuid()
));
}
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("create-volume-%s-from-snapshots", msg.getVolumeUuid()));
chain.then(new ShareFlow() {
CreateVolumeFromVolumeSnapshotOnPrimaryStorageReply reply;
@Override
public void setup() {
flow(new NoRollbackFlow() {
String __name__ = "create-volume";
@Override
public void run(final FlowTrigger trigger, Map data) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(hostUuid);
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, hostUuid, new ReturnValueCompletion<CreateVolumeFromVolumeSnapshotOnPrimaryStorageReply>(msg) {
@Override
public void success(CreateVolumeFromVolumeSnapshotOnPrimaryStorageReply returnValue) {
reply = returnValue;
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
flow(new Flow() {
String __name__ = "reserve-capacity-on-host";
Long size;
boolean success = false;
@Override
public void run(FlowTrigger trigger, Map data) {
size = reply.getSize();
reserveCapacityOnHost(hostUuid, size, self.getUuid());
success = true;
trigger.next();
}
@Override
public void rollback(FlowRollback trigger, Map data) {
if (success) {
returnStorageCapacityToHost(hostUuid, size);
}
trigger.rollback();
}
});
done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
createResourceRefVO(msg.getVolumeUuid(), VolumeVO.class.getSimpleName(), reply.getSize(), hostUuid);
bus.reply(msg, reply);
}
});
error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
CreateVolumeFromVolumeSnapshotOnPrimaryStorageReply reply = new CreateVolumeFromVolumeSnapshotOnPrimaryStorageReply();
reply.setError(errCode);
bus.reply(msg, reply);
}
});
}
}).start();
}
private void handle(final BackupVolumeSnapshotFromPrimaryStorageToBackupStorageMsg msg) {
String hostUuid = getHostUuidByResourceUuid(msg.getSnapshot().getUuid());
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(hostUuid);
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, hostUuid, new ReturnValueCompletion<BackupVolumeSnapshotFromPrimaryStorageToBackupStorageReply>(msg) {
@Override
public void success(BackupVolumeSnapshotFromPrimaryStorageToBackupStorageReply returnValue) {
bus.reply(msg, returnValue);
}
@Override
public void fail(ErrorCode errorCode) {
BackupVolumeSnapshotFromPrimaryStorageToBackupStorageReply reply = new BackupVolumeSnapshotFromPrimaryStorageToBackupStorageReply();
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
protected void handle(final RevertVolumeFromSnapshotOnPrimaryStorageMsg msg) {
final RevertVolumeFromSnapshotOnPrimaryStorageReply reply = new RevertVolumeFromSnapshotOnPrimaryStorageReply();
String hostUuid = getHostUuidByResourceUuid(msg.getSnapshot().getUuid());
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(hostUuid);
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, hostUuid, new ReturnValueCompletion<RevertVolumeFromSnapshotOnPrimaryStorageReply>(msg) {
@Override
public void success(RevertVolumeFromSnapshotOnPrimaryStorageReply returnValue) {
long increment = returnValue.getSize() - msg.getVolume().getSize();
long size = ratioMgr.calculateByRatio(self.getUuid(), increment);
if (size > 0) {
reserveCapacityOnHost(hostUuid, size, msg.getPrimaryStorageUuid());
} else if (size < 0) {
returnStorageCapacityToHost(hostUuid, Math.abs(size));
}
bus.reply(msg, returnValue);
}
@Override
public void fail(ErrorCode errorCode) {
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
protected void handle(final ReInitRootVolumeFromTemplateOnPrimaryStorageMsg msg) {
final ReInitRootVolumeFromTemplateOnPrimaryStorageReply reply = new ReInitRootVolumeFromTemplateOnPrimaryStorageReply();
String hostUuid = getHostUuidByResourceUuid(msg.getVolume().getUuid());
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(hostUuid);
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, hostUuid, new ReturnValueCompletion<ReInitRootVolumeFromTemplateOnPrimaryStorageReply>(msg) {
@Override
public void success(ReInitRootVolumeFromTemplateOnPrimaryStorageReply returnValue) {
bus.reply(msg, returnValue);
}
@Override
public void fail(ErrorCode errorCode) {
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
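// look up the host that owns a resource on this local storage; throws an
// OperationFailureException if no host holds the resource or the owning host
// has already been deleted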
protected String getHostUuidByResourceUuid(String resUuid) {
String huuid;
huuid = new SQLBatchWithReturn<String>() {
private String findHostByUuid(String uuid) {
return sql("select uuid from HostVO where uuid = :uuid", String.class).param("uuid", uuid).find();
}
@Override
protected String scripts() {
String uuid = sql("select hostUuid from LocalStorageResourceRefVO where resourceUuid = :resUuid", String.class)
.param("resUuid", resUuid)
.find();
if (uuid == null) {
throw new OperationFailureException(operr("cannot find any host which has resource[uuid:%s]", resUuid));
} else if (findHostByUuid(uuid) == null) {
throw new OperationFailureException(
operr("Resource[uuid:%s] can only be operated on host[uuid:%s], but the host has been deleted",
resUuid, uuid));
}
return uuid;
}
}.execute();
return huuid;
}
@Override
protected void handle(final DeleteSnapshotOnPrimaryStorageMsg msg) {
final String hostUuid = getHostUuidByResourceUuid(msg.getSnapshot().getUuid());
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("delete-snapshot-%s-on-local-storage-%s", msg.getSnapshot().getUuid(), self.getUuid()));
chain.then(new ShareFlow() {
DeleteSnapshotOnPrimaryStorageReply reply;
@Override
public void setup() {
flow(new NoRollbackFlow() {
String __name__ = "delete-snapshot-on-host";
@Override
public void run(final FlowTrigger trigger, Map data) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(hostUuid);
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, hostUuid, new ReturnValueCompletion<DeleteSnapshotOnPrimaryStorageReply>(trigger) {
@Override
public void success(DeleteSnapshotOnPrimaryStorageReply returnValue) {
reply = returnValue;
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
flow(new NoRollbackFlow() {
String __name__ = "return-capacity-to-host";
@Override
public void run(FlowTrigger trigger, Map data) {
returnStorageCapacityToHost(hostUuid, msg.getSnapshot().getSize());
trigger.next();
}
});
done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
deleteResourceRefVO(msg.getSnapshot().getUuid());
bus.reply(msg, reply);
}
});
error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
DeleteSnapshotOnPrimaryStorageReply reply = new DeleteSnapshotOnPrimaryStorageReply();
reply.setError(errCode);
bus.reply(msg, reply);
}
});
}
}).start();
}
private void handle(CheckSnapshotMsg msg) {
CheckSnapshotReply reply = new CheckSnapshotReply();
final String hostUuid = getHostUuidByResourceUuid(msg.getVolumeUuid());
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(hostUuid);
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, hostUuid, new Completion(msg) {
@Override
public void success() {
bus.reply(msg, reply);
}
@Override
public void fail(ErrorCode errorCode) {
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
private void handle(final TakeSnapshotMsg msg) {
final VolumeSnapshotInventory sp = msg.getStruct().getCurrent();
final String hostUuid = getHostUuidByResourceUuid(sp.getVolumeUuid());
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(hostUuid);
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, hostUuid, new ReturnValueCompletion<TakeSnapshotReply>(msg) {
@Override
public void success(TakeSnapshotReply returnValue) {
createResourceRefVO(sp.getUuid(), VolumeSnapshotVO.class.getSimpleName(), returnValue.getInventory().getSize(), hostUuid);
bus.reply(msg, returnValue);
}
@Override
public void fail(ErrorCode errorCode) {
TakeSnapshotReply reply = new TakeSnapshotReply();
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
private void handle(RemoveHostFromLocalStorageMsg msg) {
RemoveHostFromLocalStorageReply reply = new RemoveHostFromLocalStorageReply();
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return String.format("remove-host-%s-from-localStorage-%s", msg.getHostUuid(), msg.getPrimaryStorageUuid());
}
@Override
public void run(SyncTaskChain chain) {
removeHostFromLocalStorage(msg, new Completion(chain) {
@Override
public void success() {
bus.reply(msg, reply);
chain.next();
}
@Override
public void fail(ErrorCode errorCode) {
reply.setError(errorCode);
bus.reply(msg, reply);
chain.next();
}
});
}
@Override
public String getName() {
return getSyncSignature();
}
});
}
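// remove a host from this local storage: notify the just-before-delete-from-db
// extension points for the root volumes and VMs on the host, then delete the
// host's resource refs and subtract its capacity from the primary storage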
private void removeHostFromLocalStorage(RemoveHostFromLocalStorageMsg msg, Completion completion) {
LocalStorageHostRefVO ref = Q.New(LocalStorageHostRefVO.class)
.eq(LocalStorageHostRefVO_.hostUuid, msg.getHostUuid())
.eq(LocalStorageHostRefVO_.primaryStorageUuid, msg.getPrimaryStorageUuid())
.find();
List<VolumeVO> volumeVOS = SQL.New("select vo from VolumeVO vo, LocalStorageResourceRefVO ref " +
"where vo.uuid = ref.resourceUuid and ref.hostUuid =:hostUuid " +
"and ref.primaryStorageUuid=:primaryStorageUuid and ref.resourceType=:resourceType " +
"and vo.type=:type")
.param("hostUuid", msg.getHostUuid())
.param("primaryStorageUuid", self.getUuid())
.param("resourceType", VolumeVO.class.getSimpleName())
.param("type", VolumeType.Root)
.list();
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName("remove-host-from-localStorage");
chain.then(new ShareFlow() {
@Override
public void setup() {
flow(new NoRollbackFlow() {
String __name__ = "remove-volume-under-resource";
@Override
public void run(FlowTrigger trigger, Map data) {
//ZSTAC-34201 delete resources under volumes
for (VolumeVO vo : volumeVOS) {
pluginRgty.getExtensionList(VolumeJustBeforeDeleteFromDbExtensionPoint.class).forEach(ext-> ext.volumeJustBeforeDeleteFromDb(VolumeInventory.valueOf(vo)));
}
trigger.next();
}
});
flow(new NoRollbackFlow() {
String __name__ = "remove-vm-under-resource";
@Override
public void run(FlowTrigger trigger, Map data) {
List<String> volumeUuids = volumeVOS.stream().map(VolumeVO::getUuid).collect(Collectors.toList());
if (!volumeUuids.isEmpty()) {
List<VmInstanceVO> vmInstanceVOS = SQL.New("select vm from VmInstanceVO vm where vm.rootVolumeUuid in" +
" (select vol.uuid from VolumeVO vol where vol.uuid in (:volUuids)" +
" and vol.type = :volType)")
.param("volUuids", volumeUuids)
.param("volType", VolumeType.Root).list();
for (VmInstanceVO vo : vmInstanceVOS) {
pluginRgty.getExtensionList(VmJustBeforeDeleteFromDbExtensionPoint.class).forEach(ext-> ext.vmJustBeforeDeleteFromDb(VmInstanceInventory.valueOf(vo)));
}
}
trigger.next();
}
});
flow(new NoRollbackFlow() {
String __name__ = "delete-host-resource";
@Override
public void run(FlowTrigger trigger, Map data) {
//ZSTAC-9635
deleteResourceRef(msg.getHostUuid());
if (ref != null) {
dbf.remove(ref);
decreaseCapacity(ref.getTotalCapacity(),
ref.getAvailableCapacity(),
ref.getTotalPhysicalCapacity(),
ref.getAvailablePhysicalCapacity(),
ref.getSystemUsedCapacity());
}
trigger.next();
}
});
}
}).done(new FlowDoneHandler(completion) {
@Override
public void handle(Map data) {
completion.success();
}
}).error(new FlowErrorHandler(completion) {
@Override
public void handle(ErrorCode errCode, Map data) {
completion.fail(errCode);
}
}).start();
}
void deleteResourceRef(String hostUuid) {
new SQLBatch() {
// dirty cleanup of the database for all related entities linking to the local storage.
// basically, we clean up volumes, volume snapshots, and image caches.
// all SQL below must be executed in the order defined; DO NOT change anything unless you
// know exactly what you are doing.
// MySQL doesn't support cascaded triggers, which means that when you delete a VM its nic
// is deleted accordingly, but the trigger installed as `AFTER DELETE ON VmNicVO` will not
// be executed.
// so we have to explicitly delete VmNicVO, VolumeVO, then VmInstanceVO, in that order, so
// the MySQL triggers fire and delete the entities in AccountResourceRefVO, SystemVO, etc.
@Override
protected void scripts() {
// delete the image cache
sql("delete from ImageCacheVO ic where ic.primaryStorageUuid = :psUuid and" +
" ic.installUrl like :url").param("psUuid", self.getUuid())
.param("url", String.format("%%%s%%", hostUuid)).execute();
List<LocalStorageResourceRefVO> refs = sql(
"select ref from LocalStorageResourceRefVO ref where ref.hostUuid = :huuid" +
" and ref.primaryStorageUuid = :psUuid", LocalStorageResourceRefVO.class
).param("huuid", hostUuid).param("psUuid", self.getUuid()).list();
if (refs.isEmpty()) {
return;
}
List<String> volumesUuids = new ArrayList<>();
List<String> snapshotUuids = new ArrayList<>();
List<String> snapshotTreeUuids = new ArrayList<>();
for (LocalStorageResourceRefVO ref : refs) {
if (VolumeVO.class.getSimpleName().equals(ref.getResourceType())) {
volumesUuids.add(ref.getResourceUuid());
} else if (VolumeSnapshotVO.class.getSimpleName().equals(ref.getResourceType())) {
snapshotUuids.add(ref.getResourceUuid());
}
}
if (!snapshotUuids.isEmpty()) {
List<String> treeList = sql(
"select treeUuid from VolumeSnapshotVO where uuid in (:uuids) group by treeUuid", String.class)
.param("uuids", snapshotUuids).list();
if (treeList != null) {
snapshotTreeUuids.addAll(treeList);
}
sql("delete from VolumeSnapshotVO sp where sp.uuid in (:uuids)")
.param("uuids", snapshotUuids).execute();
logger.debug(String.format("delete volume snapshots%s because the host[uuid:%s] is removed from" +
" the local storage[name:%s, uuid:%s]", snapshotUuids, hostUuid, self.getName(), self.getUuid()));
}
if (!snapshotTreeUuids.isEmpty()) {
for (String snapshotTreeUuid : snapshotTreeUuids) {
if (q(VolumeSnapshotVO.class).eq(VolumeSnapshotVO_.treeUuid, snapshotTreeUuid).isExists()) {
// this tree still has snapshots left; check the remaining trees
continue;
}
logger.debug(String.format("volume snapshot tree[uuid:%s] has no leaf, delete it", snapshotTreeUuid));
sql(VolumeSnapshotTreeVO.class).eq(VolumeSnapshotTreeVO_.uuid, snapshotTreeUuid).hardDelete();
}
}
if (!volumesUuids.isEmpty()) {
List<String> vmUuidsToDelete = sql("select vm.uuid from VmInstanceVO vm where vm.rootVolumeUuid in" +
" (select vol.uuid from VolumeVO vol where vol.uuid in (:volUuids)" +
" and vol.type = :volType)", String.class)
.param("volUuids", volumesUuids).param("volType", VolumeType.Root).list();
if (!vmUuidsToDelete.isEmpty()) {
// delete vm nics
sql("delete from VmNicVO nic where nic.vmInstanceUuid in (:uuids)")
.param("uuids", vmUuidsToDelete).execute();
}
if (!vmUuidsToDelete.isEmpty()) {
// delete vm cdrom
sql("delete from VmCdRomVO cdrom where cdrom.vmInstanceUuid in (:uuids)")
.param("uuids", vmUuidsToDelete).execute();
}
// delete volumes including root and data volumes
sql("delete from VolumeVO vol where vol.uuid in (:uuids)")
.param("uuids", volumesUuids).execute();
logger.debug(String.format("delete volumes%s because the host[uuid:%s] is removed from" +
" the local storage[name:%s, uuid:%s]", volumesUuids, hostUuid, self.getName(), self.getUuid()));
if (!vmUuidsToDelete.isEmpty()) {
// delete the vms
sql("delete from VmInstanceVO vm where vm.uuid in (:uuids)")
.param("uuids", vmUuidsToDelete).execute();
logger.debug(String.format("delete VMs%s because the host[uuid:%s] is removed from" +
" the local storage[name:%s, uuid:%s]", vmUuidsToDelete, hostUuid, self.getName(), self.getUuid()));
}
}
for (LocalStorageResourceRefVO ref : refs) {
dbf.getEntityManager().merge(ref);
dbf.getEntityManager().remove(ref);
}
}
}.execute();
}
private void handle(LocalStorageRecalculateCapacityMsg msg) {
recalculateLocalStorageCapacity(msg.isNeedRecalculateRef(), new NoErrorCompletion(msg) {
@Override
public void done() {
LocalStorageRecalculateCapacityReply reply = new LocalStorageRecalculateCapacityReply();
bus.reply(msg, reply);
}
});
}
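// recalculate capacity inside a serialized chain task; getMaxPendingTasks() == 0
// means a request arriving while one is already queued completes immediately
// without running again. With needRecalculateRef set, every host ref is
// recalculated before the total, otherwise only the total capacity is refreshed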
private void recalculateLocalStorageCapacity(boolean needRecalculateRef, NoErrorCompletion completion) {
thdf.chainSubmit(new ChainTask(completion) {
@Override
public String getSyncSignature() {
return String.format("recalculate-local-storage-capacity-%s", self.getUuid());
}
@Override
public void run(SyncTaskChain chain) {
LocalStorageCapacityRecalculator c = new LocalStorageCapacityRecalculator();
if (needRecalculateRef) {
c.calculateByPrimaryStorageUuid(self.getUuid());
}
c.calculateTotalCapacity(self.getUuid());
completion.done();
chain.next();
}
@Override
public String getName() {
return getSyncSignature();
}
protected int getMaxPendingTasks() {
return 0;
}
protected void exceedMaxPendingCallback() {
completion.done();
}
// route the full-ref recalculation and the total-only recalculation to separate dedup sub-queues
protected String getDeduplicateString() {
return needRecalculateRef ? "calculate-all-refs" : "calculate-total";
}
});
}
@Override
protected void handle(RecalculatePrimaryStorageCapacityMsg msg) {
recalculateLocalStorageCapacity(true, new NoErrorCompletion(msg) {
@Override
public void done() {
RecalculatePrimaryStorageCapacityReply reply = new RecalculatePrimaryStorageCapacityReply();
bus.reply(msg, reply);
}
});
}
protected void handle(final InitPrimaryStorageOnHostConnectedMsg msg) {
final InitPrimaryStorageOnHostConnectedReply reply = new InitPrimaryStorageOnHostConnectedReply();
FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
chain.setName("init primarystorage on host connected");
chain.then(new NoRollbackFlow() {
String __name__ = "initial db";
@Override
public void run(FlowTrigger trigger, Map data) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(msg.getHostUuid(), false);
final LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<PhysicalCapacityUsage>(msg) {
@Override
public void success(PhysicalCapacityUsage usage) {
LocalStoragePhysicalCapacityUsage c = (LocalStoragePhysicalCapacityUsage)usage;
List<LocalStorageHostRefVO> refs = Q.New(LocalStorageHostRefVO.class)
.eq(LocalStorageHostRefVO_.hostUuid, msg.getHostUuid())
.eq(LocalStorageHostRefVO_.primaryStorageUuid, self.getUuid())
.list();
LocalStorageHostRefVO ref;
if (refs == null || refs.isEmpty()) {
ref = new LocalStorageHostRefVO();
ref.setTotalCapacity(c.totalPhysicalSize);
ref.setAvailableCapacity(c.availablePhysicalSize);
ref.setTotalPhysicalCapacity(c.totalPhysicalSize);
ref.setAvailablePhysicalCapacity(c.availablePhysicalSize);
ref.setHostUuid(msg.getHostUuid());
ref.setPrimaryStorageUuid(self.getUuid());
ref.setSystemUsedCapacity(c.totalPhysicalSize - c.availablePhysicalSize - c.localStorageUsedSize);
dbf.persist(ref);
increaseCapacity(
c.totalPhysicalSize,
c.availablePhysicalSize,
c.totalPhysicalSize,
c.availablePhysicalSize,
ref.getSystemUsedCapacity());
} else {
ref = refs.get(0);
long originSystemUsed = ref.getSystemUsedCapacity();
ref.setAvailablePhysicalCapacity(c.availablePhysicalSize);
ref.setTotalPhysicalCapacity(c.totalPhysicalSize);
ref.setTotalCapacity(c.totalPhysicalSize);
ref.setSystemUsedCapacity(c.totalPhysicalSize - c.availablePhysicalSize - c.localStorageUsedSize);
dbf.update(ref);
if (originSystemUsed != ref.getSystemUsedCapacity()) {
increaseCapacity(null, null, null, null, ref.getSystemUsedCapacity() - originSystemUsed);
}
// the host's local storage capacity changed
// need to recalculate the capacity in the database
LocalStorageRecalculateCapacityMsg rmsg = new LocalStorageRecalculateCapacityMsg();
rmsg.setPrimaryStorageUuid(self.getUuid());
rmsg.setNeedRecalculateRef(false);
bus.makeTargetServiceIdByResourceUuid(rmsg, PrimaryStorageConstant.SERVICE_ID, self.getUuid());
bus.send(rmsg);
}
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
}).then(new NoRollbackFlow() {
String __name__ = "create initailized file";
@Override
public void run(FlowTrigger trigger, Map data) {
HostInventory host = HostInventory.valueOf(dbf.findByUuid(msg.getHostUuid(), HostVO.class));
checkLocalStoragePrimaryStorageInitilized(CollectionDSL.list(host), true, new Completion(trigger) {
@Override
public void success() {
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
}).done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
bus.reply(msg, reply);
}
}).error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
reply.setError(errCode);
bus.reply(msg, reply);
}
}).start();
}
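// thin wrappers around LocalStorageUtils for host capacity accounting; the
// @ExceptionSafe variant swallows reservation failures instead of propagating them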
@ExceptionSafe
protected void reserveCapaciryOnHostIgnoreError(String hostUuid, long size, String psUuid) {
new LocalStorageUtils().reserveCapacityOnHost(hostUuid, size, psUuid, self, true);
}
protected void reserveCapacityOnHost(String hostUuid, long size, String psUuid) {
new LocalStorageUtils().reserveCapacityOnHost(hostUuid, size, psUuid, self, false);
}
@Transactional
protected void returnStorageCapacityToHost(String hostUuid, long size) {
new LocalStorageUtils().returnStorageCapacityToHost(hostUuid, size, self);
}
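// return the capacity occupied by a resource back to its host: the host/resource
// refs are loaded under a pessimistic write lock, volume sizes are scaled by the
// over-provisioning ratio, and LocalStorageReturnHostCapacityExtensionPoint
// callbacks may adjust the size before the host's available capacity is increased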
@Transactional
protected void returnStorageCapacityToHostByResourceUuid(String resUuid) {
String sql = "select href, rref" +
" from LocalStorageHostRefVO href, LocalStorageResourceRefVO rref" +
" where href.hostUuid = rref.hostUuid" +
" and href.primaryStorageUuid = rref.primaryStorageUuid" +
" and rref.resourceUuid = :resUuid" +
" and rref.primaryStorageUuid = :puuid";
TypedQuery<Tuple> q = dbf.getEntityManager().createQuery(sql, Tuple.class);
q.setLockMode(LockModeType.PESSIMISTIC_WRITE);
q.setParameter("resUuid", resUuid);
q.setParameter("puuid", self.getUuid());
List<Tuple> tupleList = q.getResultList();
if (tupleList == null || tupleList.isEmpty()) {
return;
}
DebugUtils.Assert(tupleList.size() == 1,
"should not get more than one LocalStorageHostRefVO/LocalStorageResourceRefVO");
Tuple twoRefs = tupleList.get(0);
LocalStorageHostRefVO href = twoRefs.get(0, LocalStorageHostRefVO.class);
LocalStorageResourceRefVO rref = twoRefs.get(1, LocalStorageResourceRefVO.class);
long requiredSize = rref.getSize();
if (VolumeVO.class.getSimpleName().equals(rref.getResourceType())) {
requiredSize = ratioMgr.calculateByRatio(self.getUuid(), requiredSize);
}
LocalStorageHostCapacityStruct s = new LocalStorageHostCapacityStruct();
s.setSizeBeforeOverProvisioning(rref.getSize());
s.setHostUuid(href.getHostUuid());
s.setLocalStorage(getSelfInventory());
s.setSize(requiredSize);
for (LocalStorageReturnHostCapacityExtensionPoint ext : pluginRgty.getExtensionList(
LocalStorageReturnHostCapacityExtensionPoint.class)) {
ext.beforeReturnLocalStorageCapacityOnHost(s);
}
LocalStorageUtils.logCapacityChange(self.getUuid(), href.getHostUuid(), href.getAvailableCapacity(), href.getAvailableCapacity() + s.getSize());
href.setAvailableCapacity(href.getAvailableCapacity() + s.getSize());
dbf.getEntityManager().merge(href);
}
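// instantiate a volume on the destination host: reserve ratio-adjusted capacity
// first (returned on rollback), create the volume through the hypervisor backend,
// then record the volume-to-host ref on success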
@Override
protected void handle(final InstantiateVolumeOnPrimaryStorageMsg msg) {
createSubTaskProgress("create a volume[%s] on the local storage", msg.getVolume().getType());
String hostUuid = msg.getDestHost().getUuid();
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(hostUuid);
final LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("instantiate-volume-%s-local-primary-storage-%s", msg.getVolume().getUuid(), self.getUuid()));
final String finalHostUuid = hostUuid;
chain.then(new ShareFlow() {
InstantiateVolumeOnPrimaryStorageReply reply;
@Override
public void setup() {
flow(new Flow() {
String __name__ = "allocate-capacity-on-host";
long requiredSize = ratioMgr.calculateByRatio(self.getUuid(), msg.getVolume().getSize());
long reservedSize;
@Override
public void run(FlowTrigger trigger, Map data) {
reserveCapacityOnHost(finalHostUuid, requiredSize, self.getUuid());
reservedSize = requiredSize;
trigger.next();
}
@Override
public void rollback(FlowRollback trigger, Map data) {
if (reservedSize != 0) {
returnStorageCapacityToHost(finalHostUuid, reservedSize);
}
trigger.rollback();
}
});
flow(new NoRollbackFlow() {
String __name__ = "instantiate-volume-on-host";
@Override
public void run(final FlowTrigger trigger, Map data) {
bkd.handle(msg, new ReturnValueCompletion<InstantiateVolumeOnPrimaryStorageReply>(msg) {
@Override
public void success(InstantiateVolumeOnPrimaryStorageReply returnValue) {
reply = returnValue;
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
createResourceRefVO(msg.getVolume().getUuid(), VolumeVO.class.getSimpleName(),
msg.getVolume().getSize(), finalHostUuid);
bus.reply(msg, reply);
}
});
error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
InstantiateVolumeOnPrimaryStorageReply reply = new InstantiateVolumeOnPrimaryStorageReply();
reply.setError(errCode);
bus.reply(msg, reply);
}
});
}
}).start();
}
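// helpers maintaining the LocalStorageResourceRefVO records that map resources
// (volumes, snapshots, ISOs) to the hosts holding them on this primary storage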
private void deleteResourceRefVO(String resourceUuid) {
SimpleQuery<LocalStorageResourceRefVO> q = dbf.createQuery(LocalStorageResourceRefVO.class);
q.add(LocalStorageResourceRefVO_.primaryStorageUuid, Op.EQ, self.getUuid());
q.add(LocalStorageResourceRefVO_.resourceUuid, Op.EQ, resourceUuid);
LocalStorageResourceRefVO ref = q.find();
if (ref != null) {
dbf.remove(ref);
}
}
private void createResourceRefVO(String resUuid, String resType, long size, String hostUuid) {
LocalStorageResourceRefVO ref = new LocalStorageResourceRefVO();
ref.setPrimaryStorageUuid(self.getUuid());
ref.setSize(size);
ref.setResourceType(resType);
ref.setResourceUuid(resUuid);
ref.setHostUuid(hostUuid);
dbf.persist(ref);
}
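// delete a volume from the local storage; if no host record is found (the owner
// host may have been deleted), reply success right away instead of failing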
@Override
protected void handle(final DeleteVolumeOnPrimaryStorageMsg msg) {
String sql = "select host" +
" from LocalStorageResourceRefVO h, HostVO host" +
" where h.hostUuid = host.uuid" +
" and h.resourceUuid = :resourceUuid" +
" and h.resourceType = :resourceType";
TypedQuery<HostVO> q = dbf.getEntityManager().createQuery(sql, HostVO.class);
q.setParameter("resourceUuid", msg.getVolume().getUuid());
q.setParameter("resourceType", VolumeVO.class.getSimpleName());
if (q.getResultList().isEmpty()) {
logger.debug(String.format("volume[uuid:%s] is not on the local storage[uuid:%s, name:%s]," +
"the host the volume is on may have been deleted",
msg.getVolume().getUuid(), self.getUuid(), self.getName()));
DeleteVolumeOnPrimaryStorageReply reply = new DeleteVolumeOnPrimaryStorageReply();
bus.reply(msg, reply);
return;
}
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("delete-volume-%s-local-primary-storage-%s", msg.getVolume().getUuid(), self.getUuid()));
chain.then(new ShareFlow() {
DeleteVolumeOnPrimaryStorageReply reply;
@Override
public void setup() {
flow(new NoRollbackFlow() {
String __name__ = "delete-volume-on-host";
@Override
public void run(final FlowTrigger trigger, Map data) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByResourceUuid(msg.getVolume().getUuid(), VolumeVO.class.getSimpleName());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<DeleteVolumeOnPrimaryStorageReply>(msg) {
@Override
public void success(DeleteVolumeOnPrimaryStorageReply returnValue) {
reply = returnValue;
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
flow(new NoRollbackFlow() {
String __name__ = "return-capacity-to-host";
@Override
public void run(FlowTrigger trigger, Map data) {
returnStorageCapacityToHostByResourceUuid(msg.getVolume().getUuid());
trigger.next();
}
});
done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
deleteResourceRefVO(msg.getVolume().getUuid());
bus.reply(msg, reply);
}
});
error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
DeleteVolumeOnPrimaryStorageReply reply = new DeleteVolumeOnPrimaryStorageReply();
reply.setError(errCode);
bus.reply(msg, reply);
}
});
}
}).start();
}
@Override
protected void handle(CreateImageCacheFromVolumeOnPrimaryStorageMsg msg) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByResourceUuid(msg.getVolumeInventory().getUuid(), VolumeVO.class.getSimpleName());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<CreateImageCacheFromVolumeOnPrimaryStorageReply>(msg) {
@Override
public void success(CreateImageCacheFromVolumeOnPrimaryStorageReply reply) {
bus.reply(msg, reply);
}
@Override
public void fail(ErrorCode errorCode) {
CreateImageCacheFromVolumeOnPrimaryStorageReply r = new CreateImageCacheFromVolumeOnPrimaryStorageReply();
r.setError(errorCode);
bus.reply(msg, r);
}
});
}
@Override
protected void handle(CreateImageCacheFromVolumeSnapshotOnPrimaryStorageMsg msg) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByResourceUuid(msg.getVolumeSnapshot().getUuid(), VolumeSnapshotVO.class.getSimpleName());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<CreateImageCacheFromVolumeSnapshotOnPrimaryStorageReply>(msg) {
@Override
public void success(CreateImageCacheFromVolumeSnapshotOnPrimaryStorageReply reply) {
bus.reply(msg, reply);
}
@Override
public void fail(ErrorCode errorCode) {
CreateImageCacheFromVolumeSnapshotOnPrimaryStorageReply r = new CreateImageCacheFromVolumeSnapshotOnPrimaryStorageReply();
r.setError(errorCode);
bus.reply(msg, r);
}
});
}
@Override
protected void handle(CreateTemplateFromVolumeOnPrimaryStorageMsg msg) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByResourceUuid(msg.getVolumeInventory().getUuid(), VolumeVO.class.getSimpleName());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<CreateTemplateFromVolumeOnPrimaryStorageReply>(msg) {
@Override
public void success(CreateTemplateFromVolumeOnPrimaryStorageReply reply) {
bus.reply(msg, reply);
}
@Override
public void fail(ErrorCode errorCode) {
CreateTemplateFromVolumeOnPrimaryStorageReply r = new CreateTemplateFromVolumeOnPrimaryStorageReply();
r.setError(errorCode);
bus.reply(msg, r);
}
});
}
@Override
protected void handle(final DownloadDataVolumeToPrimaryStorageMsg msg) {
if (msg.getHostUuid() == null) {
throw new OperationFailureException(operr("unable to create the data volume[uuid: %s] on a local primary storage[uuid:%s], because the hostUuid is not specified.",
msg.getVolumeUuid(), self.getUuid()));
}
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("download-data-volume-%s-to-local-storage-%s", msg.getVolumeUuid(), self.getUuid()));
chain.then(new ShareFlow() {
DownloadDataVolumeToPrimaryStorageReply reply;
long requiredSize = ratioMgr.calculateByRatio(self.getUuid(), msg.getImage().getSize());
@Override
public void setup() {
flow(new Flow() {
String __name__ = "allocate-capacity-on-host";
@Override
public void run(FlowTrigger trigger, Map data) {
reserveCapacityOnHost(msg.getHostUuid(), requiredSize, self.getUuid());
trigger.next();
}
@Override
public void rollback(FlowRollback trigger, Map data) {
returnStorageCapacityToHost(msg.getHostUuid(), requiredSize);
trigger.rollback();
}
});
flow(new NoRollbackFlow() {
String __name__ = "download-the-data-volume-to-host";
@Override
public void run(final FlowTrigger trigger, Map data) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(msg.getHostUuid());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<DownloadDataVolumeToPrimaryStorageReply>(trigger) {
@Override
public void success(DownloadDataVolumeToPrimaryStorageReply returnValue) {
reply = returnValue;
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
createResourceRefVO(msg.getVolumeUuid(), VolumeVO.class.getSimpleName(), msg.getImage().getSize(), msg.getHostUuid());
saveVolumeProvisioningStrategy(msg.getVolumeUuid(), VolumeProvisioningStrategy.ThinProvisioning);
bus.reply(msg, reply);
}
});
error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
DownloadDataVolumeToPrimaryStorageReply reply = new DownloadDataVolumeToPrimaryStorageReply();
reply.setError(errCode);
bus.reply(msg, reply);
}
});
}
}).start();
}
@Override
protected void handle(GetInstallPathForDataVolumeDownloadMsg msg) {
if (msg.getHostUuid() == null) {
throw new OperationFailureException(operr("unable to create the data volume[uuid: %s] on a local primary storage[uuid:%s], because the hostUuid is not specified.",
msg.getVolumeUuid(), self.getUuid()));
}
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(msg.getHostUuid());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<GetInstallPathForDataVolumeDownloadReply>(msg) {
@Override
public void success(GetInstallPathForDataVolumeDownloadReply returnValue) {
bus.reply(msg, returnValue);
}
@Override
public void fail(ErrorCode errorCode) {
GetInstallPathForDataVolumeDownloadReply reply = new GetInstallPathForDataVolumeDownloadReply();
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
@Override
protected void handle(final DeleteVolumeBitsOnPrimaryStorageMsg msg) {
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("delete-volume-bits-on-local-primary-storage-%s", self.getUuid()));
chain.then(new ShareFlow() {
DeleteVolumeBitsOnPrimaryStorageReply reply;
@Override
public void setup() {
flow(new NoRollbackFlow() {
String __name__ = "delete-volume-bits-on-host";
@Override
public void run(final FlowTrigger trigger, Map data) {
LocalStorageHypervisorFactory f = msg.getHypervisorType() != null ? getHypervisorBackendFactory(msg.getHypervisorType()) :
getHypervisorBackendFactoryByResourceUuid(msg.getBitsUuid(), msg.getBitsType());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<DeleteVolumeBitsOnPrimaryStorageReply>(msg) {
@Override
public void success(DeleteVolumeBitsOnPrimaryStorageReply returnValue) {
reply = returnValue;
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
flow(new NoRollbackFlow() {
String __name__ = "return-capacity-to-host";
@Override
public void run(FlowTrigger trigger, Map data) {
returnStorageCapacityToHostByResourceUuid(msg.getBitsUuid());
trigger.next();
}
});
done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
if (!msg.isFromRecycle()) {
deleteResourceRefVO(msg.getBitsUuid());
}
bus.reply(msg, reply);
}
});
error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
DeleteVolumeBitsOnPrimaryStorageReply reply = new DeleteVolumeBitsOnPrimaryStorageReply();
reply.setError(errCode);
bus.reply(msg, reply);
}
});
}
}).start();
}
@Override
protected void handle(DownloadVolumeTemplateToPrimaryStorageMsg msg) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(msg.getHostUuid());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<DownloadVolumeTemplateToPrimaryStorageReply>(msg) {
DownloadVolumeTemplateToPrimaryStorageReply reply = new DownloadVolumeTemplateToPrimaryStorageReply();
@Override
public void success(DownloadVolumeTemplateToPrimaryStorageReply returnValue) {
bus.reply(msg, returnValue);
}
@Override
public void fail(ErrorCode errorCode) {
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
@Override
protected void handle(final DeleteBitsOnPrimaryStorageMsg msg) {
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("delete-bits-on-local-primary-storage-%s", self.getUuid()));
chain.then(new ShareFlow() {
DeleteBitsOnPrimaryStorageReply reply;
@Override
public void setup() {
flow(new NoRollbackFlow() {
String __name__ = "delete-bits-on-host";
@Override
public void run(final FlowTrigger trigger, Map data) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(msg.getHostUuid());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<DeleteBitsOnPrimaryStorageReply>(msg) {
@Override
public void success(DeleteBitsOnPrimaryStorageReply returnValue) {
reply = returnValue;
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
bus.reply(msg, reply);
}
});
error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
reply.setError(errCode);
bus.reply(msg, reply);
}
});
}
}).start();
}
@Override
protected void handle(final DownloadIsoToPrimaryStorageMsg msg) {
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("download-iso-%s-local-primary-storage-%s", msg.getIsoSpec().getInventory().getUuid(), self.getUuid()));
chain.then(new ShareFlow() {
DownloadIsoToPrimaryStorageReply reply;
@Override
public void setup() {
flow(new NoRollbackFlow() {
String __name__ = "download-iso-to-host";
@Override
public void run(final FlowTrigger trigger, Map data) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByHostUuid(msg.getDestHostUuid());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<DownloadIsoToPrimaryStorageReply>(msg) {
@Override
public void success(DownloadIsoToPrimaryStorageReply returnValue) {
reply = returnValue;
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
boolean isExists = Q.New(LocalStorageResourceRefVO.class)
.eq(LocalStorageResourceRefVO_.resourceUuid, msg.getIsoSpec().getInventory().getUuid())
.eq(LocalStorageResourceRefVO_.resourceType, ImageVO.class.getSimpleName())
.eq(LocalStorageResourceRefVO_.primaryStorageUuid, self.getUuid())
.eq(LocalStorageResourceRefVO_.hostUuid, msg.getDestHostUuid())
.isExists();
if (!isExists) {
createResourceRefVO(
msg.getIsoSpec().getInventory().getUuid(),
ImageVO.class.getSimpleName(),
msg.getIsoSpec().getInventory().getActualSize(),
msg.getDestHostUuid()
);
}
bus.reply(msg, reply);
}
});
error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
DownloadIsoToPrimaryStorageReply reply = new DownloadIsoToPrimaryStorageReply();
reply.setError(errCode);
bus.reply(msg, reply);
}
});
}
}).start();
}
@Override
protected void handle(final DeleteIsoFromPrimaryStorageMsg msg) {
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("delete-iso-local-primary-storage-%s", msg.getIsoSpec().getInventory().getUuid()));
chain.then(new ShareFlow() {
DeleteIsoFromPrimaryStorageReply reply;
@Override
public void setup() {
flow(new NoRollbackFlow() {
String __name__ = "delete-iso-on-host";
@Override
public void run(final FlowTrigger trigger, Map data) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByResourceUuid(
msg.getIsoSpec().getInventory().getUuid(), ImageVO.class.getSimpleName());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<DeleteIsoFromPrimaryStorageReply>(msg) {
@Override
public void success(DeleteIsoFromPrimaryStorageReply returnValue) {
reply = returnValue;
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
flow(new NoRollbackFlow() {
String __name__ = "return-capacity-to-host";
@Override
public void run(FlowTrigger trigger, Map data) {
returnStorageCapacityToHostByResourceUuid(msg.getIsoSpec().getInventory().getUuid());
trigger.next();
}
});
done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
deleteResourceRefVO(msg.getIsoSpec().getInventory().getUuid());
bus.reply(msg, reply);
}
});
error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
DeleteIsoFromPrimaryStorageReply reply = new DeleteIsoFromPrimaryStorageReply();
reply.setError(errCode);
bus.reply(msg, reply);
}
});
}
}).start();
}
@Override
protected void handle(AskVolumeSnapshotCapabilityMsg msg) {
AskVolumeSnapshotCapabilityReply reply = new AskVolumeSnapshotCapabilityReply();
VolumeSnapshotCapability capability = new VolumeSnapshotCapability();
capability.setSupport(true);
String volumeType = msg.getVolume().getType();
if (VolumeType.Data.toString().equals(volumeType) || VolumeType.Root.toString().equals(volumeType)) {
capability.setArrangementType(VolumeSnapshotArrangementType.CHAIN);
} else if (VolumeType.Memory.toString().equals(volumeType)) {
capability.setArrangementType(VolumeSnapshotArrangementType.INDIVIDUAL);
} else {
throw new CloudRuntimeException(String.format("unknown volume type %s", volumeType));
}
reply.setCapability(capability);
bus.reply(msg, reply);
}
@Override
protected void handle(final SyncVolumeSizeOnPrimaryStorageMsg msg) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactoryByResourceUuid(msg.getVolumeUuid(), VolumeVO.class.getSimpleName());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
String huuid = getHostUuidByResourceUuid(msg.getVolumeUuid());
bkd.handle(msg, huuid, new ReturnValueCompletion<SyncVolumeSizeOnPrimaryStorageReply>(msg) {
@Override
public void success(SyncVolumeSizeOnPrimaryStorageReply returnValue) {
saveVolumeProvisioningStrategy(msg.getVolumeUuid(), returnValue.getActualSize() < returnValue.getSize() ? VolumeProvisioningStrategy.ThinProvisioning : VolumeProvisioningStrategy.ThickProvisioning);
bus.reply(msg, returnValue);
}
@Override
public void fail(ErrorCode errorCode) {
SyncVolumeSizeOnPrimaryStorageReply reply = new SyncVolumeSizeOnPrimaryStorageReply();
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
protected void saveVolumeProvisioningStrategy(String volumeUuid, VolumeProvisioningStrategy strategy) {
if (!VolumeSystemTags.VOLUME_PROVISIONING_STRATEGY.hasTag(volumeUuid)) {
SystemTagCreator tagCreator = VolumeSystemTags.VOLUME_PROVISIONING_STRATEGY.newSystemTagCreator(volumeUuid);
tagCreator.setTagByTokens(
map(e(VolumeSystemTags.VOLUME_PROVISIONING_STRATEGY_TOKEN, strategy))
);
tagCreator.inherent = false;
tagCreator.create();
}
}
protected void setCapacity(Long total, Long avail, Long totalPhysical, Long availPhysical) {
PrimaryStorageCapacityUpdater updater = new PrimaryStorageCapacityUpdater(self.getUuid());
updater.update(total, avail, totalPhysical, availPhysical);
}
void increaseCapacity(final Long total,
final Long avail,
final Long totalPhysical,
final Long availPhysical,
final Long systemUsed) {
PrimaryStorageCapacityUpdater updater = new PrimaryStorageCapacityUpdater(self.getUuid());
updater.run(new PrimaryStorageCapacityUpdaterRunnable() {
@Override
public PrimaryStorageCapacityVO call(PrimaryStorageCapacityVO cap) {
if (total != null) {
cap.setTotalCapacity(cap.getTotalCapacity() + total);
}
if (avail != null) {
cap.setAvailableCapacity(cap.getAvailableCapacity() + avail);
}
if (totalPhysical != null) {
cap.setTotalPhysicalCapacity(cap.getTotalPhysicalCapacity() + totalPhysical);
}
if (availPhysical != null) {
cap.setAvailablePhysicalCapacity(cap.getAvailablePhysicalCapacity() + availPhysical);
}
if (systemUsed != null) {
if (cap.getSystemUsedCapacity() == null) {
cap.setSystemUsedCapacity(0L);
}
cap.setSystemUsedCapacity(cap.getSystemUsedCapacity() + systemUsed);
}
return cap;
}
});
}
void decreaseCapacity(final Long total,
final Long avail,
final Long totalPhysical,
final Long availPhysical,
final Long systemUsed) {
PrimaryStorageCapacityUpdater updater = new PrimaryStorageCapacityUpdater(self.getUuid());
updater.run(new PrimaryStorageCapacityUpdaterRunnable() {
@Override
public PrimaryStorageCapacityVO call(PrimaryStorageCapacityVO cap) {
String beforeCapacity = String.format("[totalCapacity: %s, availableCapacity: %s, totalPhysicalCapacity: %s, " +
"availablePhysicalCapacity: %s]", cap.getTotalCapacity(), cap.getAvailableCapacity(),
cap.getTotalPhysicalCapacity(), cap.getAvailablePhysicalCapacity());
if (total != null) {
long t = cap.getTotalCapacity() - total;
cap.setTotalCapacity(t < 0 ? 0 : t);
}
if (avail != null) {
                    // for over-provisioning scenarios, a negative available capacity is permitted
long a = cap.getAvailableCapacity() - avail;
cap.setAvailableCapacity(a);
}
if (totalPhysical != null) {
long tp = cap.getTotalPhysicalCapacity() - totalPhysical;
cap.setTotalPhysicalCapacity(tp < 0 ? 0 : tp);
}
if (availPhysical != null) {
long ap = cap.getAvailablePhysicalCapacity() - availPhysical;
cap.setAvailablePhysicalCapacity(ap < 0 ? 0 : ap);
}
if (systemUsed != null) {
long su = cap.getSystemUsedCapacity() - systemUsed;
cap.setSystemUsedCapacity(su < 0 ? 0 : su);
}
String nowCapacity = String.format("[totalCapacity: %s, availableCapacity: %s, totalPhysicalCapacity: %s, " +
"availablePhysicalCapacity: %s]", cap.getTotalCapacity(), cap.getAvailableCapacity(),
cap.getTotalPhysicalCapacity(), cap.getAvailablePhysicalCapacity());
logger.info(String.format("decrease local primary storage[uuid: %s] capacity, changed capacity from %s to %s", cap.getUuid(), beforeCapacity, nowCapacity));
return cap;
}
});
}
@Transactional(readOnly = true)
protected List<FactoryCluster> getAllFactoriesForAttachedClusters() {
String sql = "select cluster" +
" from ClusterVO cluster, PrimaryStorageClusterRefVO ref" +
" where ref.clusterUuid = cluster.uuid" +
" and ref.primaryStorageUuid = :uuid";
TypedQuery<ClusterVO> q = dbf.getEntityManager().createQuery(sql, ClusterVO.class);
q.setParameter("uuid", self.getUuid());
List<ClusterVO> clusters = q.getResultList();
if (clusters.isEmpty()) {
return new ArrayList<>();
}
Map<String, FactoryCluster> m = new HashMap<>();
for (ClusterVO c : clusters) {
FactoryCluster fc = m.get(c.getHypervisorType());
if (fc == null) {
fc = new FactoryCluster();
fc.factory = getHypervisorBackendFactory(c.getHypervisorType());
fc.clusters = new ArrayList<>();
m.put(c.getHypervisorType(), fc);
}
fc.clusters.add(ClusterInventory.valueOf(c));
}
return new ArrayList<FactoryCluster>(m.values());
}
@Override
protected void connectHook(final ConnectParam param, final Completion completion) {
FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
chain.setName("connect localstorage host hook");
chain.then(new NoRollbackFlow() {
String __name__ = "check localstorage initilized on host";
@Override
public void run(FlowTrigger trigger, Map data) {
checkLocalStoragePrimaryStorageInitilized(param.isNewAdded(), true, new Completion(trigger) {
@Override
public void success() {
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
}).then(new NoRollbackFlow() {
String __name__ = "recaculate primarystorage capacity";
@Override
public void run(FlowTrigger trigger, Map data) {
LocalStorageRecalculateCapacityMsg rmsg = new LocalStorageRecalculateCapacityMsg();
rmsg.setPrimaryStorageUuid(self.getUuid());
bus.makeLocalServiceId(rmsg, PrimaryStorageConstant.SERVICE_ID);
bus.send(rmsg);
trigger.next();
}
}).done(new FlowDoneHandler(completion) {
@Override
public void handle(Map data) {
completion.success();
}
}).error(new FlowErrorHandler(completion) {
@Override
public void handle(ErrorCode errCode, Map data) {
completion.fail(errCode);
}
}).start();
}
private List<HostInventory> getLocalStorageHosts() {
return new Callable<List<HostInventory>>() {
@Override
@Transactional(readOnly = true)
public List<HostInventory> call() {
String sql = "select host" +
" from LocalStorageHostRefVO h, HostVO host" +
" where h.primaryStorageUuid = :puuid" +
" and h.hostUuid = host.uuid" +
" and host.status != :hstatus";
TypedQuery<HostVO> q = dbf.getEntityManager().createQuery(sql, HostVO.class);
q.setParameter("puuid", self.getUuid());
q.setParameter("hstatus", HostStatus.Disconnected);
return HostInventory.valueOf(q.getResultList());
}
}.call();
}
private void sendWarnning(String hostUuid, String details, PrimaryStorageInventory ps) {
HostCanonicalEvents.HostMountData data = new HostCanonicalEvents.HostMountData();
data.hostUuid = hostUuid;
data.psUuid = ps.getUuid();
data.details = details;
eventf.fire(HostCanonicalEvents.HOST_CHECK_INITIALIZED_FAILED, data);
}
private boolean hostHasInitializedTag(String hostUuid) {
List<Map<String, String>> tags = LocalStorageSystemTags.LOCALSTORAGE_HOST_INITIALIZED.getTokensOfTagsByResourceUuid(hostUuid);
for (Map<String, String> tag : tags) {
if (tag.get(LocalStorageSystemTags.LOCALSTORAGE_HOST_INITIALIZED_TOKEN).equals(self.getUuid())) {
return true;
}
}
return false;
}
private void checkLocalStoragePrimaryStorageInitilized(boolean isNewAdded, boolean initialized, Completion completion) {
List<HostInventory> hosts = getLocalStorageHosts();
if (!isNewAdded && hosts.size() == 0) {
completion.fail(operr("No Host state is Enabled, Please check the availability of the host"));
} else {
checkLocalStoragePrimaryStorageInitilized(hosts, initialized, completion);
}
}
private void checkLocalStoragePrimaryStorageInitilized(List<HostInventory> hosts, boolean initialized, Completion completion) {
new While<>(hosts).all((host, com) -> {
if (hostHasInitializedTag(host.getUuid())) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactory(host.getHypervisorType());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.checkHostAttachedPSMountPath(host.getUuid(), new Completion(com) {
@Override
public void success() {
com.done();
}
@Override
public void fail(ErrorCode errorCode) {
sendWarnning(host.getUuid(), errorCode.getDetails(), getSelfInventory());
com.done();
}
});
} else {
if (initialized) {
LocalStorageHypervisorFactory f = getHypervisorBackendFactory(host.getHypervisorType());
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.initializeHostAttachedPSMountPath(host.getUuid(), new Completion(com) {
@Override
public void success() {
SystemTagCreator creator = LocalStorageSystemTags.LOCALSTORAGE_HOST_INITIALIZED.newSystemTagCreator(host.getUuid());
creator.inherent = true;
creator.unique = false;
creator.setTagByTokens(map(e(LocalStorageSystemTags.LOCALSTORAGE_HOST_INITIALIZED_TOKEN, self.getUuid())));
creator.create();
com.done();
}
@Override
public void fail(ErrorCode errorCode) {
sendWarnning(host.getUuid(), errorCode.getDetails(), getSelfInventory());
com.done();
}
});
} else {
com.done();
}
}
}).run(new WhileDoneCompletion(completion) {
@Override
public void done(ErrorCodeList errorCodeList) {
completion.success();
}
});
}
@Override
protected void pingHook(Completion completion) {
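        // isNewAdded=true skips the no-available-host check; initialized=false means a
        // ping only re-verifies mount paths on hosts that were already initialized,
        // and never initializes new ones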
checkLocalStoragePrimaryStorageInitilized(true, false, completion);
}
@Override
protected void handle(ShrinkVolumeSnapshotOnPrimaryStorageMsg msg) {
bus.dealWithUnknownMessage(msg);
}
@Override
final protected void syncPhysicalCapacity(final ReturnValueCompletion<PhysicalCapacityUsage> completion) {
final List<FactoryCluster> fs = getAllFactoriesForAttachedClusters();
class Sync {
long total = 0;
long avail = 0;
Iterator<FactoryCluster> it = fs.iterator();
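            // walk the per-hypervisor backends sequentially; each asynchronous reply
            // accumulates into total/avail until the iterator is exhausted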
void sync() {
if (!it.hasNext()) {
PhysicalCapacityUsage ret = new PhysicalCapacityUsage();
ret.totalPhysicalSize = total;
ret.availablePhysicalSize = avail;
completion.success(ret);
return;
}
FactoryCluster fc = it.next();
LocalStorageHypervisorBackend bkd = fc.factory.getHypervisorBackend(self);
bkd.syncPhysicalCapacityInCluster(fc.clusters, new ReturnValueCompletion<PhysicalCapacityUsage>(completion) {
@Override
public void success(PhysicalCapacityUsage returnValue) {
total += returnValue.totalPhysicalSize;
avail += returnValue.availablePhysicalSize;
sync();
}
@Override
public void fail(ErrorCode errorCode) {
completion.fail(errorCode);
}
});
}
}
new Sync().sync();
}
protected LocalStorageHypervisorFactory getHypervisorBackendFactoryByHostUuid(String hostUuid) {
return getHypervisorBackendFactoryByHostUuid(hostUuid, true);
}
protected LocalStorageHypervisorFactory getHypervisorBackendFactoryByHostUuid(String hostUuid, boolean checkPsRef) {
if (checkPsRef && !Q.New(LocalStorageHostRefVO.class)
.eq(LocalStorageHostRefVO_.hostUuid, hostUuid)
.eq(LocalStorageHostRefVO_.primaryStorageUuid, self.getUuid()).isExists()) {
throw new OperationFailureException(operr("host[uuid:%s] cannot access local storage[uuid:%s], maybe it is detached", hostUuid, self.getUuid()));
}
SimpleQuery<HostVO> q = dbf.createQuery(HostVO.class);
q.select(HostVO_.hypervisorType);
q.add(HostVO_.uuid, Op.EQ, hostUuid);
String hvType = q.findValue();
return getHypervisorBackendFactory(hvType);
}
@Transactional(readOnly = true)
private LocalStorageHypervisorFactory getHypervisorBackendFactoryByResourceUuid(String resUuid, String resourceType) {
String sql = "select host.hypervisorType" +
" from HostVO host, LocalStorageResourceRefVO ref" +
" where ref.hostUuid = host.uuid" +
" and ref.resourceUuid = :resUuid" +
" and ref.primaryStorageUuid = :puuid group by hypervisorType";
TypedQuery<String> q = dbf.getEntityManager().createQuery(sql, String.class);
q.setParameter("resUuid", resUuid);
q.setParameter("puuid", self.getUuid());
List<String> ret = q.getResultList();
if (ret.isEmpty()) {
throw new OperationFailureException(
operr("resource[uuid:%s, type: %s] is not on the local primary storage[uuid:%s]",
resUuid, resourceType, self.getUuid()));
}
if (ret.size() != 1) {
throw new OperationFailureException(
operr("resource[uuid:%s, type: %s] on the local primary storage[uuid:%s] maps to multiple hypervisor%s",
resUuid, resourceType, self.getUuid(), ret));
}
String hvType = ret.get(0);
return getHypervisorBackendFactory(hvType);
}
private LocalStorageHypervisorFactory getHypervisorBackendFactory(String hvType) {
DebugUtils.Assert(hvType != null, "hvType is null!!!");
for (LocalStorageHypervisorFactory f : pluginRgty.getExtensionList(LocalStorageHypervisorFactory.class)) {
if (hvType.equals(f.getHypervisorType())) {
return f;
}
}
throw new CloudRuntimeException(String.format("cannot find LocalStorageHypervisorFactory with hypervisorType[%s]", hvType));
}
@Override
public void attachHook(final String clusterUuid, Completion completion) {
SimpleQuery<ClusterVO> q = dbf.createQuery(ClusterVO.class);
q.select(ClusterVO_.hypervisorType);
q.add(ClusterVO_.uuid, Op.EQ, clusterUuid);
String hvType = q.findValue();
LocalStorageHypervisorFactory f = getHypervisorBackendFactory(hvType);
LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.attachHook(clusterUuid, completion);
}
@Override
protected void checkImageIfNeedToDownload(DownloadIsoToPrimaryStorageMsg msg) {
logger.debug("check if image exist in disabled primary storage");
if (self.getState() != PrimaryStorageState.Disabled) {
return;
}
if (!Q.New(ImageCacheVO.class)
.eq(ImageCacheVO_.primaryStorageUuid, self.getUuid())
.eq(ImageCacheVO_.imageUuid, msg.getIsoSpec().getInventory().getUuid())
.like(ImageCacheVO_.installUrl, String.format("%%hostUuid://%s%%", msg.getDestHostUuid()))
.isExists()) {
throw new OperationFailureException(operr(
"cannot attach ISO to a primary storage[uuid:%s] which is disabled",
self.getUuid()));
}
}
@Override
public void handle(AskInstallPathForNewSnapshotMsg msg) {
String hvType = Q.New(HostVO.class)
.select(HostVO_.hypervisorType)
.eq(HostVO_.uuid, msg.getHostUuid())
.findValue();
LocalStorageHypervisorFactory f = getHypervisorBackendFactory(hvType);
final LocalStorageHypervisorBackend bkd = f.getHypervisorBackend(self);
bkd.handle(msg, new ReturnValueCompletion<AskInstallPathForNewSnapshotReply>(msg) {
@Override
public void success(AskInstallPathForNewSnapshotReply returnValue) {
bus.reply(msg, returnValue);
}
@Override
public void fail(ErrorCode errorCode) {
AskInstallPathForNewSnapshotReply reply = new AskInstallPathForNewSnapshotReply();
reply.setSuccess(false);
reply.setError(errorCode);
bus.reply(msg, reply);
}
});
}
@Override
protected void handle(GetPrimaryStorageResourceLocationMsg msg) {
GetPrimaryStorageResourceLocationReply reply = new GetPrimaryStorageResourceLocationReply();
reply.setPrimaryStorageUuid(msg.getPrimaryStorageUuid());
List<String> hostUuids = Q.New(LocalStorageResourceRefVO.class)
.eq(LocalStorageResourceRefVO_.resourceUuid, msg.getResourceUuid())
.select(LocalStorageResourceRefVO_.hostUuid)
.listValues();
reply.setHostUuids(hostUuids);
bus.reply(msg, reply);
}
@Override
protected void handle(CheckVolumeSnapshotOperationOnPrimaryStorageMsg msg) {
CheckVolumeSnapshotOperationOnPrimaryStorageReply r = new CheckVolumeSnapshotOperationOnPrimaryStorageReply();
List<String> disconnectHostUuids = SQL.New("select h.uuid from HostVO h, LocalStorageResourceRefVO ref" +
" where ref.resourceUuid in :volUuids" +
" and ref.hostUuid = h.uuid" +
" and h.status != :hstatus", String.class)
.param("hstatus", HostStatus.Connected)
.param("volUuids", msg.getVolumeUuids())
.list();
if (!disconnectHostUuids.isEmpty()) {
            r.setError(err(HostErrors.HOST_IS_DISCONNECTED, "host(s)[uuids: %s] where the volumes are located are not Connected.", disconnectHostUuids));
}
bus.reply(msg, r);
}
@Override
protected void handle(ChangeVolumeTypeOnPrimaryStorageMsg msg) {
LocalStorageHypervisorFactory factory = getHypervisorBackendFactoryByResourceUuid(msg.getVolume().getUuid(), VolumeVO.class.getSimpleName());
factory.getHypervisorBackend(self).handle(msg, new ReturnValueCompletion<ChangeVolumeTypeOnPrimaryStorageReply>(msg) {
@Override
public void success(ChangeVolumeTypeOnPrimaryStorageReply reply) {
bus.reply(msg, reply);
}
@Override
public void fail(ErrorCode errorCode) {
ChangeVolumeTypeOnPrimaryStorageReply r = new ChangeVolumeTypeOnPrimaryStorageReply();
r.setError(errorCode);
bus.reply(msg, r);
}
});
}
public static class LocalStoragePhysicalCapacityUsage extends PrimaryStorageBase.PhysicalCapacityUsage {
public long localStorageUsedSize;
}
}
| 70,718 |
350 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Version 1 of the Ironic API
Specification can be found at doc/source/webapi/v1.rst
"""
from http import client as http_client
import pecan
from webob import exc
from ironic import api
from ironic.api.controllers import base
from ironic.api.controllers import link
from ironic.api.controllers.v1 import allocation
from ironic.api.controllers.v1 import chassis
from ironic.api.controllers.v1 import conductor
from ironic.api.controllers.v1 import deploy_template
from ironic.api.controllers.v1 import driver
from ironic.api.controllers.v1 import event
from ironic.api.controllers.v1 import node
from ironic.api.controllers.v1 import port
from ironic.api.controllers.v1 import portgroup
from ironic.api.controllers.v1 import ramdisk
from ironic.api.controllers.v1 import utils
from ironic.api.controllers.v1 import versions
from ironic.api.controllers.v1 import volume
from ironic.api.controllers import version
from ironic.api import method
from ironic.common.i18n import _
BASE_VERSION = versions.BASE_VERSION
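# Helpers that build Version objects pinned to the minimum/maximum API version
# supported by this service; _check_version() below validates requests against them.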
def min_version():
return base.Version(
{base.Version.string: versions.min_version_string()},
versions.min_version_string(), versions.max_version_string())
def max_version():
return base.Version(
{base.Version.string: versions.max_version_string()},
versions.min_version_string(), versions.max_version_string())
def v1():
v1 = {
'id': "v1",
'links': [
link.make_link('self', api.request.public_url,
'v1', '', bookmark=True),
link.make_link('describedby',
'https://docs.openstack.org',
'/ironic/latest/contributor/',
'webapi.html',
bookmark=True, type='text/html')
],
'media_types': {
'base': 'application/json',
'type': 'application/vnd.openstack.ironic.v1+json'
},
'chassis': [
link.make_link('self', api.request.public_url,
'chassis', ''),
link.make_link('bookmark',
api.request.public_url,
'chassis', '',
bookmark=True)
],
'nodes': [
link.make_link('self', api.request.public_url,
'nodes', ''),
link.make_link('bookmark',
api.request.public_url,
'nodes', '',
bookmark=True)
],
'ports': [
link.make_link('self', api.request.public_url,
'ports', ''),
link.make_link('bookmark',
api.request.public_url,
'ports', '',
bookmark=True)
],
'drivers': [
link.make_link('self', api.request.public_url,
'drivers', ''),
link.make_link('bookmark',
api.request.public_url,
'drivers', '',
bookmark=True)
],
'version': version.default_version()
}
if utils.allow_portgroups():
v1['portgroups'] = [
link.make_link('self', api.request.public_url,
'portgroups', ''),
link.make_link('bookmark', api.request.public_url,
'portgroups', '', bookmark=True)
]
if utils.allow_volume():
v1['volume'] = [
link.make_link('self',
api.request.public_url,
'volume', ''),
link.make_link('bookmark',
api.request.public_url,
'volume', '',
bookmark=True)
]
if utils.allow_ramdisk_endpoints():
v1['lookup'] = [
link.make_link('self', api.request.public_url,
'lookup', ''),
link.make_link('bookmark',
api.request.public_url,
'lookup', '',
bookmark=True)
]
v1['heartbeat'] = [
link.make_link('self',
api.request.public_url,
'heartbeat', ''),
link.make_link('bookmark',
api.request.public_url,
'heartbeat', '',
bookmark=True)
]
if utils.allow_expose_conductors():
v1['conductors'] = [
link.make_link('self',
api.request.public_url,
'conductors', ''),
link.make_link('bookmark',
api.request.public_url,
'conductors', '',
bookmark=True)
]
if utils.allow_allocations():
v1['allocations'] = [
link.make_link('self',
api.request.public_url,
'allocations', ''),
link.make_link('bookmark',
api.request.public_url,
'allocations', '',
bookmark=True)
]
if utils.allow_expose_events():
v1['events'] = [
link.make_link('self', api.request.public_url,
'events', ''),
link.make_link('bookmark',
api.request.public_url,
'events', '',
bookmark=True)
]
if utils.allow_deploy_templates():
v1['deploy_templates'] = [
link.make_link('self',
api.request.public_url,
'deploy_templates', ''),
link.make_link('bookmark',
api.request.public_url,
'deploy_templates', '',
bookmark=True)
]
return v1
class Controller(object):
"""Version 1 API controller root."""
_subcontroller_map = {
'nodes': node.NodesController(),
'ports': port.PortsController(),
'portgroups': portgroup.PortgroupsController(),
'chassis': chassis.ChassisController(),
'drivers': driver.DriversController(),
'volume': volume.VolumeController(),
'lookup': ramdisk.LookupController(),
'heartbeat': ramdisk.HeartbeatController(),
'conductors': conductor.ConductorsController(),
'allocations': allocation.AllocationsController(),
'events': event.EventsController(),
'deploy_templates': deploy_template.DeployTemplatesController()
}
@method.expose()
def index(self):
        # NOTE: v1() is called for every request because we need the host
        # url from the request object to build the links.
self._add_version_attributes()
if api.request.method != "GET":
pecan.abort(http_client.METHOD_NOT_ALLOWED)
return v1()
def _check_version(self, version, headers=None):
if headers is None:
headers = {}
# ensure that major version in the URL matches the header
if version.major != BASE_VERSION:
raise exc.HTTPNotAcceptable(_(
"Mutually exclusive versions requested. Version %(ver)s "
"requested but not supported by this service. The supported "
"version range is: [%(min)s, %(max)s].") %
{'ver': version, 'min': versions.min_version_string(),
'max': versions.max_version_string()},
headers=headers)
# ensure the minor version is within the supported range
if version < min_version() or version > max_version():
raise exc.HTTPNotAcceptable(_(
"Version %(ver)s was requested but the minor version is not "
"supported by this service. The supported version range is: "
"[%(min)s, %(max)s].") %
{'ver': version, 'min': versions.min_version_string(),
'max': versions.max_version_string()},
headers=headers)
def _add_version_attributes(self):
v = base.Version(api.request.headers, versions.min_version_string(),
versions.max_version_string())
# Always set the min and max headers
api.response.headers[base.Version.min_string] = (
versions.min_version_string())
api.response.headers[base.Version.max_string] = (
versions.max_version_string())
# assert that requested version is supported
self._check_version(v, api.response.headers)
api.response.headers[base.Version.string] = str(v)
api.request.version = v
@pecan.expose()
def _lookup(self, primary_key, *remainder):
self._add_version_attributes()
controller = self._subcontroller_map.get(primary_key)
if not controller:
pecan.abort(http_client.NOT_FOUND)
return controller, remainder
__all__ = ('Controller',)
| 4,950 |
7,482 | /*******************************************************************************
* Copyright (C) 2020, Huada Semiconductor Co., Ltd. All rights reserved.
*
* This software component is licensed by HDSC under BSD 3-Clause license
* (the "License"); You may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
* opensource.org/licenses/BSD-3-Clause
*/
/******************************************************************************/
/** \file hc32f460_hash.c
**
** A detailed description is available at
** @link HashGroup HASH description @endlink
**
** - 2018-10-18 CDT First version for Device Driver Library of HASH.
**
******************************************************************************/
/*******************************************************************************
* Include files
******************************************************************************/
#include "hc32f460_hash.h"
#include "hc32f460_utility.h"
#if (DDL_HASH_ENABLE == DDL_ON)
/**
*******************************************************************************
** \addtogroup HashGroup
******************************************************************************/
//@{
/*******************************************************************************
* Local type definitions ('typedef')
******************************************************************************/
/*******************************************************************************
* Local pre-processor symbols/macros ('#define')
******************************************************************************/
/* Constants definitions. */
#define HASH_GROUP_LEN (64u)
#define LAST_GROUP_MAX_LEN (56u)
/*******************************************************************************
* Global variable definitions (declared in header file with 'extern')
******************************************************************************/
/*******************************************************************************
* Local function prototypes ('static')
******************************************************************************/
static void HASH_WriteData(const uint8_t *pu8SrcData);
static void HASH_GetMsgDigest(uint8_t *pu8MsgDigest);
/*******************************************************************************
* Local variable definitions ('static')
******************************************************************************/
/*******************************************************************************
* Function implementation - global ('extern') and local ('static')
******************************************************************************/
/**
*******************************************************************************
** \brief Initialize the HASH.
**
** \param None
**
** \retval None
**
******************************************************************************/
void HASH_Init(void)
{
/* Stop hash calculating */
bM4_HASH_CR_START = 0u;
}
/**
*******************************************************************************
** \brief DeInitialize the HASH.
**
** \param None
**
** \retval None
**
******************************************************************************/
void HASH_DeInit(void)
{
/* Stop hash calculating */
bM4_HASH_CR_START = 0u;
/* Reset register CR. */
M4_HASH->CR = 0u;
}
/**
*******************************************************************************
** \brief HASH(SHA256) processes pu8SrcData.
**
** \param [in] pu8SrcData Pointer to the source data buffer (buffer to
** be hashed).
**
** \param [in] u32SrcDataSize Length of the input buffer in bytes.
**
** \param [out] pu8MsgDigest Pointer to the computed digest. Its size
** must be 32 bytes.
**
** \param [in] u32Timeout Timeout value.
**
** \retval Ok No error occurred.
** \retval ErrorTimeout HASH works timeout.
** \retval ErrorInvalidParameter Parameter error.
**
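 ** \note Usage sketch (names illustrative; system and peripheral clocks are
 **       assumed to be configured elsewhere):
 **       uint8_t au8Digest[32u];
 **       en_result_t enRet = HASH_Start(au8Msg, u32MsgLen, au8Digest, 10u);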
******************************************************************************/
en_result_t HASH_Start(const uint8_t *pu8SrcData,
uint32_t u32SrcDataSize,
uint8_t *pu8MsgDigest,
uint32_t u32Timeout)
{
en_result_t enRet = ErrorInvalidParameter;
uint8_t u8FillBuffer[HASH_GROUP_LEN];
uint8_t u8FirstGroup = 0u;
uint8_t u8HashEnd = 0u;
uint8_t u8DataEndMark = 0u;
uint32_t u32Index = 0u;
uint32_t u32BitLenHi;
uint32_t u32BitLenLo;
uint32_t u32HashTimeout;
__IO uint32_t u32TimeCount;
if ((NULL != pu8SrcData) &&
(0u != u32SrcDataSize) &&
(NULL != pu8MsgDigest) &&
(0u != u32Timeout))
{
        /* 10 is the number of instruction cycles required by the timeout loop below. */
u32HashTimeout = u32Timeout * (SystemCoreClock / 10u / 1000u);
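        /* SHA-256 padding appends the total message length in bits as a 64-bit
           big-endian value; keep it split into high/low 32-bit words for the
           final block filled below. */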
u32BitLenHi = (u32SrcDataSize >> 29u) & 0x7u;
u32BitLenLo = (u32SrcDataSize << 3u);
while (1u)
{
/* Stop hash calculating. */
bM4_HASH_CR_START = 0u;
if (u32SrcDataSize >= HASH_GROUP_LEN)
{
HASH_WriteData(&pu8SrcData[u32Index]);
u32SrcDataSize -= HASH_GROUP_LEN;
u32Index += HASH_GROUP_LEN;
}
else if (u32SrcDataSize >= LAST_GROUP_MAX_LEN)
{
memset(u8FillBuffer, 0, HASH_GROUP_LEN);
memcpy(u8FillBuffer, &pu8SrcData[u32Index], u32SrcDataSize);
u8FillBuffer[u32SrcDataSize] = 0x80u;
u8DataEndMark = 1u;
HASH_WriteData(u8FillBuffer);
u32SrcDataSize = 0u;
}
else
{
u8HashEnd = 1u;
}
if (u8HashEnd != 0u)
{
memset(u8FillBuffer, 0, HASH_GROUP_LEN);
if (u32SrcDataSize > 0u)
{
memcpy(u8FillBuffer, &pu8SrcData[u32Index], u32SrcDataSize);
}
if (u8DataEndMark == 0u)
{
u8FillBuffer[u32SrcDataSize] = 0x80u;
}
u8FillBuffer[63u] = (uint8_t)(u32BitLenLo);
u8FillBuffer[62u] = (uint8_t)(u32BitLenLo >> 8u);
u8FillBuffer[61u] = (uint8_t)(u32BitLenLo >> 16u);
u8FillBuffer[60u] = (uint8_t)(u32BitLenLo >> 24u);
u8FillBuffer[59u] = (uint8_t)(u32BitLenHi);
u8FillBuffer[58u] = (uint8_t)(u32BitLenHi >> 8u);
u8FillBuffer[57u] = (uint8_t)(u32BitLenHi >> 16u);
u8FillBuffer[56u] = (uint8_t)(u32BitLenHi >> 24u);
HASH_WriteData(u8FillBuffer);
}
/* check if first group */
if (0u == u8FirstGroup)
{
u8FirstGroup = 1u;
/* Set first group. */
bM4_HASH_CR_FST_GRP = 1u;
}
else
{
/* Set continuous group. */
bM4_HASH_CR_FST_GRP = 0u;
}
/* Start hash calculating. */
bM4_HASH_CR_START = 1u;
u32TimeCount = 0u;
enRet = ErrorTimeout;
while (u32TimeCount < u32HashTimeout)
{
if (bM4_HASH_CR_START == 0u)
{
enRet = Ok;
break;
}
u32TimeCount++;
}
if ((ErrorTimeout == enRet) || (u8HashEnd != 0u))
{
break;
}
}
if (Ok == enRet)
{
/* HASH calculated done */
HASH_GetMsgDigest(pu8MsgDigest);
}
/* Stop hash calculating. */
bM4_HASH_CR_START = 0u;
}
return enRet;
}
/*******************************************************************************
* Function implementation - local ('static')
******************************************************************************/
/**
*******************************************************************************
** \brief Writes the input buffer in data register.
**
** \param [in] pu8SrcData Pointer to source data buffer.
**
** \retval None
**
******************************************************************************/
static void HASH_WriteData(const uint8_t *pu8SrcData)
{
uint8_t i;
uint8_t j;
uint32_t u32Temp;
__IO uint32_t *io32HashDr = &(M4_HASH->DR15);
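    /* Pack each 4-byte group MSB-first (big-endian) into consecutive 32-bit
       data registers, starting at DR15. */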
for (i = 0u; i < 16u; i++)
{
j = i * 4u + 3u;
u32Temp = (uint32_t)pu8SrcData[j];
u32Temp |= ((uint32_t)pu8SrcData[j-1u]) << 8u;
u32Temp |= ((uint32_t)pu8SrcData[j-2u]) << 16u;
u32Temp |= ((uint32_t)pu8SrcData[j-3u]) << 24u;
*io32HashDr = u32Temp;
io32HashDr++;
}
}
/**
*******************************************************************************
** \brief Provides the message digest result.
**
** \param [out] pu8MsgDigest Pointer to the message digest.
**
** \retval None
**
******************************************************************************/
static void HASH_GetMsgDigest(uint8_t *pu8MsgDigest)
{
uint8_t i;
uint8_t j;
uint32_t u32Temp;
__IO uint32_t *io32HashHr = &(M4_HASH->HR7);
for (i = 0u; i < 8u; i++)
{
j = i * 4u + 3u;
u32Temp = *io32HashHr;
pu8MsgDigest[j] = (uint8_t)u32Temp;
pu8MsgDigest[j-1u] = (uint8_t)(u32Temp >> 8u);
pu8MsgDigest[j-2u] = (uint8_t)(u32Temp >> 16u);
pu8MsgDigest[j-3u] = (uint8_t)(u32Temp >> 24u);
io32HashHr++;
}
}
//@} // HashGroup
#endif /* DDL_HASH_ENABLE */
/*******************************************************************************
* EOF (not truncated)
******************************************************************************/
| 4,310 |
601 | package io.lacuna.artifex;
import java.util.ArrayList;
import java.util.List;
import static io.lacuna.artifex.Vec.vec;
import static java.lang.Double.NaN;
/**
* @author ztellman
*/
public class Box2 extends Box<Vec2, Box2> {
public static final Box2 EMPTY = new Box2(vec(NaN, NaN), vec(NaN, NaN));
public final double lx, ly, ux, uy;
Box2(double ax, double ay, double bx, double by) {
if (ax < bx) {
this.lx = ax;
this.ux = bx;
} else {
this.ux = ax;
this.lx = bx;
}
if (ay < by) {
this.ly = ay;
this.uy = by;
} else {
this.uy = ay;
this.ly = by;
}
}
public Box3 box3(double lz, double uz) {
return new Box3(lx, ly, lz, ux, uy, uz);
}
public double width() {
return ux - lx;
}
public double height() {
return uy - ly;
}
public Box2(Vec2 a, Vec2 b) {
this(a.x, a.y, b.x, b.y);
}
public Box2 scale(double k) {
return scale(vec(k, k));
}
public Box2 scale(double x, double y) {
return scale(vec(x, y));
}
public Box2 translate(double x, double y) {
return translate(vec(x, y));
}
public Vec2[] vertices() {
return new Vec2[] {vec(lx, ly), vec(ux, ly), vec(ux, uy), vec(lx, uy)};
}
public Ring2 outline() {
List<Curve2> cs = new ArrayList<>();
Vec2[] vs = vertices();
for (int i = 0; i < vs.length; i++) {
cs.add(Line2.line(vs[i], vs[(i + 1) % 4]));
}
return new Ring2(cs);
}
@Override
public boolean intersects(Box2 b) {
if (isEmpty() || b.isEmpty()) {
return false;
}
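    // & (rather than &&) evaluates all four comparisons without branching;
    // comparisons involving NaN bounds are false, so no extra checks are needed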
return b.ux >= lx
& ux >= b.lx
& b.uy >= ly
& uy >= b.ly;
}
@Override
public Vec2 lower() {
return new Vec2(lx, ly);
}
@Override
public Vec2 upper() {
return new Vec2(ux, uy);
}
@Override
public boolean isEmpty() {
return this == EMPTY;
}
@Override
protected Box2 construct(Vec2 a, Vec2 b) {
return new Box2(a.x, a.y, b.x, b.y);
}
@Override
protected Box2 empty() {
return EMPTY;
}
}
| 935 |
1,615 | //
// ArgoBindingConvertor.h
// ArgoUI
//
// Created by <NAME> on 2020/9/2.
//
#import "MLNUIKiConvertor.h"
NS_ASSUME_NONNULL_BEGIN
@interface ArgoBindingConvertor : MLNUIKiConvertor
/**
 The difference from - (id)toNativeObject:(int)idx error:(NSError **)error
 is that a lua table is converted into an ArgoObservableMap or ArgoObservableArray.
 @param idx position on the lua state stack
 @param error error information
 @return the converted native object
*/
- (id)toArgoBindingNativeObject:(int)idx error:(NSError **)error;
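// Illustrative call (hypothetical convertor instance; -1 addresses the lua stack top):
//   NSError *error = nil;
//   id value = [convertor toArgoBindingNativeObject:-1 error:&error];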
/**
 Converts a native object into Lua data and pushes it onto the top of the stack.
 Used for argo binding.
 @param obj the native object to convert
 @param error error information
 @return the number of values pushed onto the stack; 0 means failure
*/
- (int)pushArgoBindingNativeObject:(id)obj error:(NSError **)error;
@end
NS_ASSUME_NONNULL_END
| 406 |
772 | {
"bg-BG": {
"data": {
"bubble_choice_description": {
"courseD_bee_nestedLoops8_2021": "Съберете всичкия нектар от всяко цвете и правете мед на всяка пита. \r\n\r\nИзползвайте вложено повторение(цикъл). "
}
}
}
} | 198 |
1,016 | <filename>vertx-web/src/test/java/io/vertx/ext/web/it/RouterExtendedParamTest.java
/*
* Copyright (c) 2011-2014 The original author or authors
* ------------------------------------------------------
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package io.vertx.ext.web.it;
import io.vertx.core.http.HttpMethod;
import io.vertx.ext.web.WebTestBase;
import org.junit.Test;
/**
* @author <a href="http://tfox.org"><NAME></a>
* @author <NAME>
*/
public class RouterExtendedParamTest extends WebTestBase {
@Test
public void testRouteDashVariable() throws Exception {
router.route("/foo/:my-id").handler(rc -> {
assertEquals("123", rc.pathParam("my-id"));
rc.response().end();
});
testRequest(HttpMethod.GET, "/foo/123", 200, "OK");
}
@Test
public void testRouteDashVariableNOK() throws Exception {
router.route("/flights/:from-:to").handler(rc -> {
// from isn't set as the alphabet now includes -
assertNull(rc.pathParam("from"));
assertNotNull(rc.pathParam("from-"));
rc.response().end();
});
testRequest(HttpMethod.GET, "/flights/LAX-SFO", 200, "OK");
}
}
| 540 |
2,603 | /***********************************************************************************************************************
* DISCLAIMER
* This software is supplied by Renesas Electronics Corporation and is only intended for use with Renesas products. No
* other uses are authorized. This software is owned by Renesas Electronics Corporation and is protected under all
* applicable laws, including copyright laws.
* THIS SOFTWARE IS PROVIDED "AS IS" AND RENESAS MAKES NO WARRANTIES REGARDING
* THIS SOFTWARE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING BUT NOT LIMITED TO WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. ALL SUCH WARRANTIES ARE EXPRESSLY DISCLAIMED. TO THE MAXIMUM
* EXTENT PERMITTED NOT PROHIBITED BY LAW, NEITHER RENESAS ELECTRONICS CORPORATION NOR ANY OF ITS AFFILIATED COMPANIES
* SHALL BE LIABLE FOR ANY DIRECT, INDIRECT, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES FOR ANY REASON RELATED TO THIS
* SOFTWARE, EVEN IF RENESAS OR ITS AFFILIATES HAVE BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
* Renesas reserves the right, without notice, to make changes to this software and to discontinue the availability of
* this software. By using this software, you agree to the additional terms and conditions found by accessing the
* following link:
* http://www.renesas.com/disclaimer
*
* Copyright (C) 2012 Renesas Electronics Corporation. All rights reserved.
***********************************************************************************************************************/
/***********************************************************************************************************************
* File Name : platform.h
* Description : The user chooses which MCU and board they are developing for in this file. If the board you are using
* is not listed below, please add your own or use the default 'User Board'.
***********************************************************************************************************************/
/***********************************************************************************************************************
* History : DD.MM.YYYY Version Description
* : 30.11.2011 1.00 First Release
 * : 13.01.2012 1.10 Moved from having platform defined using macro definition, to having platform defined
* by choosing an include path. This makes this file simpler and cleans up the issue
* where HEW shows all header files for all platforms under 'Dependencies'.
* : 14.02.2012 1.20 Added RX210 BSP.
* : 18.04.2012 1.30 Updated to v0.70 of FIT S/W Spec and v0.20 of FIT r_bsp Spec. This includes adding
* locking.c and locking.h in board folders. Also, r_bsp can now be configured through
* r_bsp_config.h.
* : 26.06.2012 1.40 Added new options such as exception callbacks and the ability to choose your MCU using
* its part number in r_bsp_config.h. Moved mcu_info.h to the 'mcu' folder. Made an effort
* to remove any extra files that the user would need to touch. Removed the flash_options.c
* file and put its contents in vecttbl.c.
* : 17.07.2012 1.50 Fixed bug with exception callback function names. Added BCLK_OUTPUT and SDCLK_OUTPUT
* macro options in r_bsp_config.h. Added some extra code to handle exceptions in
* vecttbl.c. Added vecttbl.h so that user has prototypes for exception callbacks.
* : 08.11.2012 1.60 Added RX111 BSP
***********************************************************************************************************************/
#ifndef _PLATFORM_H_
#define _PLATFORM_H_
/***********************************************************************************************************************
DEFINE YOUR SYSTEM - UNCOMMENT THE INCLUDE PATH FOR THE PLATFORM YOU ARE USING.
***********************************************************************************************************************/
/* RSKRX610 */
//#include "./board/rskrx610/r_bsp.h"
/* RSKRX62N */
//#include "./board/rskrx62n/r_bsp.h"
/* RSKRX62T */
//#include "./board/rskrx62t/r_bsp.h"
/* RDKRX62N */
//#include "./board/rdkrx62n/r_bsp.h"
/* RSKRX630 */
//#include "./board/rskrx630/r_bsp.h"
/* RSKRX63N */
//#include "./board/rskrx63n/r_bsp.h"
/* RDKRX63N */
//#include "./board/rdkrx63n/r_bsp.h"
/* RSKRX210 */
//#include "./board/rskrx210/r_bsp.h"
/* RSKRX111 */
#include "./board/rskrx111/r_bsp.h"
/* User Board - Define your own board here. */
//#include "./board/user/r_bsp.h"
/***********************************************************************************************************************
MAKE SURE AT LEAST ONE PLATFORM WAS DEFINED - DO NOT EDIT BELOW THIS POINT
***********************************************************************************************************************/
#ifndef PLATFORM_DEFINED
#error "Error - No platform defined in platform.h!"
#endif
#endif /* _PLATFORM_H_ */
| 1,740 |
303 | <filename>lightcrafts/src/com/lightcrafts/ui/layout/FadingTabConfiguration.java<gh_stars>100-1000
/* Copyright (C) 2005-2011 <NAME> */
package com.lightcrafts.ui.layout;
import javax.swing.*;
/**
* A fading tab is specified by a triplet: a component to show and hide,
* a name for the button text, and tooltip text
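 * (illustrative: {@code new FadingTabConfiguration(panel, "Info", "Show the info panel")})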
*/
public class FadingTabConfiguration {
public JComponent comp;
public String name;
public String tip;
public FadingTabConfiguration(
JComponent comp, String name, String tip
) {
this.comp = comp;
this.name = name;
this.tip = tip;
}
}
| 221 |
851 | /*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.testutil;
import android.app.Instrumentation;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.extractor.Extractor;
import com.google.android.exoplayer2.extractor.ExtractorInput;
import com.google.android.exoplayer2.extractor.ExtractorOutput;
import com.google.android.exoplayer2.extractor.PositionHolder;
import com.google.android.exoplayer2.extractor.SeekMap;
import com.google.android.exoplayer2.testutil.FakeExtractorInput.SimulatedIOException;
import com.google.android.exoplayer2.util.Assertions;
import java.io.IOException;
import java.util.Arrays;
import junit.framework.Assert;
/**
* Assertion methods for {@link Extractor}.
*/
public final class ExtractorAsserts {
/**
* A factory for {@link Extractor} instances.
*/
public interface ExtractorFactory {
Extractor create();
}
private static final String DUMP_EXTENSION = ".dump";
private static final String UNKNOWN_LENGTH_EXTENSION = ".unklen" + DUMP_EXTENSION;
/**
* Asserts that an extractor behaves correctly given valid input data:
* <ul>
* <li>Calls {@link Extractor#seek(long, long)} and {@link Extractor#release()} without calling
* {@link Extractor#init(ExtractorOutput)} to check these calls do not fail.</li>
* <li>Calls {@link #assertOutput(Extractor, String, byte[], Instrumentation, boolean, boolean,
* boolean, boolean)} with all possible combinations of "simulate" parameters.</li>
* </ul>
*
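   * <p>Illustrative call (extractor class and asset path are hypothetical):
   * <pre>assertBehavior(Mp3Extractor::new, "mp3/bear.mp3", getInstrumentation());</pre>
   *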
* @param factory An {@link ExtractorFactory} which creates instances of the {@link Extractor}
* class which is to be tested.
* @param file The path to the input sample.
* @param instrumentation To be used to load the sample file.
* @throws IOException If reading from the input fails.
* @throws InterruptedException If interrupted while reading from the input.
*/
public static void assertBehavior(ExtractorFactory factory, String file,
Instrumentation instrumentation) throws IOException, InterruptedException {
// Check behavior prior to initialization.
Extractor extractor = factory.create();
extractor.seek(0, 0);
extractor.release();
// Assert output.
byte[] fileData = TestUtil.getByteArray(instrumentation, file);
assertOutput(factory, file, fileData, instrumentation);
}
/**
* Calls {@link #assertOutput(Extractor, String, byte[], Instrumentation, boolean, boolean,
* boolean, boolean)} with all possible combinations of "simulate" parameters with
* {@code sniffFirst} set to true, and makes one additional call with the "simulate" and
* {@code sniffFirst} parameters all set to false.
*
* @param factory An {@link ExtractorFactory} which creates instances of the {@link Extractor}
* class which is to be tested.
* @param file The path to the input sample.
* @param data Content of the input file.
* @param instrumentation To be used to load the sample file.
* @throws IOException If reading from the input fails.
* @throws InterruptedException If interrupted while reading from the input.
*/
public static void assertOutput(ExtractorFactory factory, String file, byte[] data,
Instrumentation instrumentation) throws IOException, InterruptedException {
assertOutput(factory.create(), file, data, instrumentation, true, false, false, false);
assertOutput(factory.create(), file, data, instrumentation, true, false, false, true);
assertOutput(factory.create(), file, data, instrumentation, true, false, true, false);
assertOutput(factory.create(), file, data, instrumentation, true, false, true, true);
assertOutput(factory.create(), file, data, instrumentation, true, true, false, false);
assertOutput(factory.create(), file, data, instrumentation, true, true, false, true);
assertOutput(factory.create(), file, data, instrumentation, true, true, true, false);
assertOutput(factory.create(), file, data, instrumentation, true, true, true, true);
assertOutput(factory.create(), file, data, instrumentation, false, false, false, false);
}
/**
   * Asserts that {@code extractor} consumes {@code sampleFile} successfully and that its output
   * matches a prerecorded output dump file with the name {@code sampleFile} + "{@value
* #DUMP_EXTENSION}". If {@code simulateUnknownLength} is true and {@code sampleFile} + "{@value
* #UNKNOWN_LENGTH_EXTENSION}" exists, it's preferred.
*
* @param extractor The {@link Extractor} to be tested.
* @param file The path to the input sample.
* @param data Content of the input file.
* @param instrumentation To be used to load the sample file.
* @param sniffFirst Whether to sniff the data by calling {@link Extractor#sniff(ExtractorInput)}
* prior to consuming it.
* @param simulateIOErrors Whether to simulate IO errors.
* @param simulateUnknownLength Whether to simulate unknown input length.
* @param simulatePartialReads Whether to simulate partial reads.
* @return The {@link FakeExtractorOutput} used in the test.
* @throws IOException If reading from the input fails.
* @throws InterruptedException If interrupted while reading from the input.
*/
public static FakeExtractorOutput assertOutput(Extractor extractor, String file, byte[] data,
Instrumentation instrumentation, boolean sniffFirst, boolean simulateIOErrors,
boolean simulateUnknownLength, boolean simulatePartialReads) throws IOException,
InterruptedException {
FakeExtractorInput input = new FakeExtractorInput.Builder().setData(data)
.setSimulateIOErrors(simulateIOErrors)
.setSimulateUnknownLength(simulateUnknownLength)
.setSimulatePartialReads(simulatePartialReads).build();
if (sniffFirst) {
Assert.assertTrue(TestUtil.sniffTestData(extractor, input));
input.resetPeekPosition();
}
FakeExtractorOutput extractorOutput = consumeTestData(extractor, input, 0, true);
if (simulateUnknownLength
&& assetExists(instrumentation, file + UNKNOWN_LENGTH_EXTENSION)) {
extractorOutput.assertOutput(instrumentation, file + UNKNOWN_LENGTH_EXTENSION);
} else {
extractorOutput.assertOutput(instrumentation, file + ".0" + DUMP_EXTENSION);
}
SeekMap seekMap = extractorOutput.seekMap;
if (seekMap.isSeekable()) {
long durationUs = seekMap.getDurationUs();
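      // probe four seek points spread across the stream: 0, 1/3, 2/3 and the full duration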
for (int j = 0; j < 4; j++) {
long timeUs = (durationUs * j) / 3;
long position = seekMap.getPosition(timeUs);
input.setPosition((int) position);
for (int i = 0; i < extractorOutput.numberOfTracks; i++) {
extractorOutput.trackOutputs.valueAt(i).clear();
}
consumeTestData(extractor, input, timeUs, extractorOutput, false);
extractorOutput.assertOutput(instrumentation, file + '.' + j + DUMP_EXTENSION);
}
}
return extractorOutput;
}
/**
* Calls {@link #assertThrows(Extractor, byte[], Class, boolean, boolean, boolean)} with all
* possible combinations of "simulate" parameters.
*
* @param factory An {@link ExtractorFactory} which creates instances of the {@link Extractor}
* class which is to be tested.
* @param sampleFile The path to the input sample.
* @param instrumentation To be used to load the sample file.
* @param expectedThrowable Expected {@link Throwable} class.
* @throws IOException If reading from the input fails.
* @throws InterruptedException If interrupted while reading from the input.
* @see #assertThrows(Extractor, byte[], Class, boolean, boolean, boolean)
*/
public static void assertThrows(ExtractorFactory factory, String sampleFile,
Instrumentation instrumentation, Class<? extends Throwable> expectedThrowable)
throws IOException, InterruptedException {
byte[] fileData = TestUtil.getByteArray(instrumentation, sampleFile);
assertThrows(factory, fileData, expectedThrowable);
}
/**
* Calls {@link #assertThrows(Extractor, byte[], Class, boolean, boolean, boolean)} with all
* possible combinations of "simulate" parameters.
*
* @param factory An {@link ExtractorFactory} which creates instances of the {@link Extractor}
* class which is to be tested.
* @param fileData Content of the input file.
* @param expectedThrowable Expected {@link Throwable} class.
* @throws IOException If reading from the input fails.
* @throws InterruptedException If interrupted while reading from the input.
* @see #assertThrows(Extractor, byte[], Class, boolean, boolean, boolean)
*/
public static void assertThrows(ExtractorFactory factory, byte[] fileData,
Class<? extends Throwable> expectedThrowable) throws IOException, InterruptedException {
assertThrows(factory.create(), fileData, expectedThrowable, false, false, false);
assertThrows(factory.create(), fileData, expectedThrowable, true, false, false);
assertThrows(factory.create(), fileData, expectedThrowable, false, true, false);
assertThrows(factory.create(), fileData, expectedThrowable, true, true, false);
assertThrows(factory.create(), fileData, expectedThrowable, false, false, true);
assertThrows(factory.create(), fileData, expectedThrowable, true, false, true);
assertThrows(factory.create(), fileData, expectedThrowable, false, true, true);
assertThrows(factory.create(), fileData, expectedThrowable, true, true, true);
}
/**
* Asserts {@code extractor} throws {@code expectedThrowable} while consuming {@code sampleFile}.
*
* @param extractor The {@link Extractor} to be tested.
* @param fileData Content of the input file.
* @param expectedThrowable Expected {@link Throwable} class.
* @param simulateIOErrors If true simulates IOErrors.
* @param simulateUnknownLength If true simulates unknown input length.
* @param simulatePartialReads If true simulates partial reads.
* @throws IOException If reading from the input fails.
* @throws InterruptedException If interrupted while reading from the input.
*/
public static void assertThrows(Extractor extractor, byte[] fileData,
Class<? extends Throwable> expectedThrowable, boolean simulateIOErrors,
boolean simulateUnknownLength, boolean simulatePartialReads) throws IOException,
InterruptedException {
FakeExtractorInput input = new FakeExtractorInput.Builder().setData(fileData)
.setSimulateIOErrors(simulateIOErrors)
.setSimulateUnknownLength(simulateUnknownLength)
.setSimulatePartialReads(simulatePartialReads).build();
try {
consumeTestData(extractor, input, 0, true);
throw new AssertionError(expectedThrowable.getSimpleName() + " expected but not thrown");
} catch (Throwable throwable) {
if (expectedThrowable.equals(throwable.getClass())) {
return; // Pass!
}
throw throwable;
}
}
private ExtractorAsserts() {}
private static FakeExtractorOutput consumeTestData(Extractor extractor, FakeExtractorInput input,
long timeUs, boolean retryFromStartIfLive) throws IOException, InterruptedException {
FakeExtractorOutput output = new FakeExtractorOutput();
extractor.init(output);
consumeTestData(extractor, input, timeUs, output, retryFromStartIfLive);
return output;
}
private static void consumeTestData(Extractor extractor, FakeExtractorInput input, long timeUs,
FakeExtractorOutput output, boolean retryFromStartIfLive)
throws IOException, InterruptedException {
extractor.seek(input.getPosition(), timeUs);
PositionHolder seekPositionHolder = new PositionHolder();
int readResult = Extractor.RESULT_CONTINUE;
while (readResult != Extractor.RESULT_END_OF_INPUT) {
try {
// Extractor.read should not read seekPositionHolder.position. Set it to a value that's
// likely to cause test failure if a read does occur.
seekPositionHolder.position = Long.MIN_VALUE;
readResult = extractor.read(input, seekPositionHolder);
if (readResult == Extractor.RESULT_SEEK) {
long seekPosition = seekPositionHolder.position;
Assertions.checkState(0 <= seekPosition && seekPosition <= Integer.MAX_VALUE);
input.setPosition((int) seekPosition);
}
} catch (SimulatedIOException e) {
if (!retryFromStartIfLive) {
continue;
}
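        // a live stream has neither a known input length nor a known duration; for
        // such streams simulate a reconnect by restarting extraction from position 0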
boolean isOnDemand = input.getLength() != C.LENGTH_UNSET
|| (output.seekMap != null && output.seekMap.getDurationUs() != C.TIME_UNSET);
if (isOnDemand) {
continue;
}
input.setPosition(0);
for (int i = 0; i < output.numberOfTracks; i++) {
output.trackOutputs.valueAt(i).clear();
}
extractor.seek(0, 0);
}
}
}
private static boolean assetExists(Instrumentation instrumentation, String fileName)
throws IOException {
int i = fileName.lastIndexOf('/');
String path = i >= 0 ? fileName.substring(0, i) : "";
String file = i >= 0 ? fileName.substring(i + 1) : fileName;
return Arrays.asList(instrumentation.getContext().getResources().getAssets().list(path))
.contains(file);
}
}
| 4,454 |
2,941 | from aws_cdk import (
core,
aws_lambda as _lambda,
aws_apigateway as _apigw
)
class ApiCorsLambdaStack(core.Stack):
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
super().__init__(scope, id, **kwargs)
base_lambda = _lambda.Function(self, 'ApiCorsLambda',
handler='lambda-handler.handler',
runtime=_lambda.Runtime.PYTHON_3_7,
code=_lambda.Code.asset('lambda'))
base_api = _apigw.RestApi(self, 'ApiGatewayWithCors',
rest_api_name='ApiGatewayWithCors')
example_entity = base_api.root.add_resource(
'example',
default_cors_preflight_options=_apigw.CorsOptions(
allow_methods=['GET', 'OPTIONS'],
allow_origins=_apigw.Cors.ALL_ORIGINS)
)
example_entity_lambda_integration = _apigw.LambdaIntegration(
base_lambda,
proxy=False,
integration_responses=[{
'statusCode': '200',
'responseParameters': {
'method.response.header.Access-Control-Allow-Origin': "'*'",
}
}]
)
example_entity.add_method(
'GET', example_entity_lambda_integration,
method_responses=[{
'statusCode': '200',
'responseParameters': {
'method.response.header.Access-Control-Allow-Origin': True,
}
}]
)
app = core.App()
ApiCorsLambdaStack(app, "ApiCorsLambdaStack")
app.synth()
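# A minimal sketch of the referenced handler (assumed to live at
# lambda/lambda-handler.py; it is not part of this stack file). With
# proxy=False, the raw return value becomes the integration response body:
#
# def handler(event, context):
#     return {"message": "hello from the CORS-enabled endpoint"}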
| 930 |
11,024 | #ifndef IOBASE_DEFINED
#define IOBASE_DEFINED 1
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stddef.h>
#include <time.h>
#include <setjmp.h>
#include <stdarg.h>
#endif
| 99 |
678 | /**
* This header is generated by class-dump-z 0.2b.
*
* Source: /System/Library/PrivateFrameworks/MailServices.framework/MailServices
*/
#import <MailServices/MSMailDefaultService.h>
@interface MSSendDraft : MSMailDefaultService {
}
+ (id)sendDraftWithURL:(id)url delegate:(id)delegate; // 0x3c29
+ (id)smi_serverCommandName; // 0x3c1d
- (void)_smi_notifyClientDidFinishWithError:(id)_smi_notifyClient; // 0x3f29
- (id)_didSendDraft:(id)draft userInfo:(id)info; // 0x3df1
- (id)_sendMailDraftWithURL:(id)url delegate:(id)delegate; // 0x3cc5
@end
| 213 |
5,169 | <reponame>Gantios/Specs
{
"name": "KJLoadingAnimation",
"version": "1.1.5",
"summary": "Loading Animation.",
"homepage": "https://github.com/yangKJ/KJLoadingDemo",
"license": "Copyright (c) 2019 77",
"authors": {
"77": "<EMAIL>"
},
"platforms": {
"ios": null
},
"source": {
"git": "https://github.com/yangKJ/KJLoadingDemo.git",
"tag": "1.1.5"
},
"social_media_url": "https://www.jianshu.com/u/c84c00476ab6",
"requires_arc": true,
"frameworks": [
"Foundation",
"UIKit",
"CoreText"
],
"subspecs": [
{
"name": "KJLoadingAnimation",
"source_files": "KJLoadingDemo/KJLoadingAnimation/**/*.{h,m}",
"resources": "KJLoadingDemo/KJLoadingAnimation/**/*.{bundle}"
}
]
}
| 338 |
839 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.tools.corba.processors.idl;
import javax.wsdl.Definition;
import javax.xml.namespace.QName;
import antlr.collections.AST;
import org.apache.cxf.binding.corba.wsdl.Const;
import org.apache.cxf.binding.corba.wsdl.CorbaTypeImpl;
import org.apache.ws.commons.schema.XmlSchema;
import org.apache.ws.commons.schema.XmlSchemaType;
public class ConstVisitor extends VisitorBase {
public ConstVisitor(Scope scope,
Definition defn,
XmlSchema schemaRef,
WSDLASTVisitor wsdlVisitor) {
super(scope, defn, schemaRef, wsdlVisitor);
}
public static boolean accept(AST node) {
return node.getType() == IDLTokenTypes.LITERAL_const;
}
public void visit(AST constNode) {
// <const_dcl> ::= "const" <const_type> <identifier> "=" <const_exp>
// <const_type> ::= <integer_type>
// | <char_type>
// | <wide_char_type>
// | <boolean_type>
// | <floating_pt_type>
// | <string_type>
// | <wide_string_type>
// | <fixed_pt_const_type>
// | <scoped_name>
// | <octet_type>
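// e.g. an IDL const declaration this visitor handles (illustrative):
// const long MAX_RETRIES = 3;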
AST constTypeNode = constNode.getFirstChild();
AST constNameNode = TypesUtils.getCorbaTypeNameNode(constTypeNode);
AST constValueNode = constNameNode.getNextSibling();
// build value string
StringBuilder constValue = new StringBuilder();
if (constValueNode.toString() != null) {
constValue.append(constValueNode.toString());
}
constValueNode = constValueNode.getFirstChild();
if (constValue.length() == 1) {
// might be a control char
byte ch = (byte)constValue.charAt(0);
if (ch >= 0 && ch <= 31) {
// ascii code between 0 and 31 is invisible control code
constValue.deleteCharAt(0);
constValue.append('\\').append(Integer.toOctalString(ch));
}
}
while (constValueNode != null) {
constValue.append(constValueNode.toString());
constValueNode = constValueNode.getFirstChild();
}
QName constQName = new QName(typeMap.getTargetNamespace(),
new Scope(getScope(), constNameNode).toString());
Visitor visitor = null;
if (PrimitiveTypesVisitor.accept(constTypeNode)) {
visitor = new PrimitiveTypesVisitor(getScope(), definition, schema, schemas);
} else if (StringVisitor.accept(constTypeNode)) {
// string_type_spec
// wstring_type_spec
visitor = new StringVisitor(getScope(), definition, schema, wsdlVisitor, constTypeNode);
} else if (FixedPtConstVisitor.accept(constTypeNode)) {
visitor = new FixedPtConstVisitor(getScope(), definition, schema, schemas);
} else if (ScopedNameVisitor.accept(getScope(), definition, schema, constTypeNode, wsdlVisitor)) {
visitor = new ScopedNameVisitor(getScope(), definition, schema, wsdlVisitor);
}
if (visitor == null) {
throw new RuntimeException("can't resolve type for const " + constNameNode.getText());
}
visitor.visit(constTypeNode);
XmlSchemaType constSchemaType = visitor.getSchemaType();
CorbaTypeImpl constCorbaType = visitor.getCorbaType();
// corba:const
Const corbaConst = new Const();
corbaConst.setQName(constQName);
corbaConst.setValue(constValue.toString());
corbaConst.setType(constSchemaType.getQName());
corbaConst.setIdltype(constCorbaType.getQName());
typeMap.getStructOrExceptionOrUnion().add(corbaConst);
}
}
| 1,975 |
491 | // Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
// or implied. See the License for the specific language governing permissions and limitations under
// the License.
//
// ╔════════════════════════════════════════════════════════════════════════════════════════╗
// ║──█████████╗───███████╗───████████╗───██╗──────██╗───███████╗───████████╗───████████╗───║
// ║──██╔══════╝──██╔════██╗──██╔════██╗──██║──────██║──██╔════██╗──██╔════██╗──██╔════██╗──║
// ║──████████╗───██║────██║──████████╔╝──██║──█╗──██║──█████████║──████████╔╝──██║────██║──║
// ║──██╔═════╝───██║────██║──██╔════██╗──██║█████╗██║──██╔════██║──██╔════██╗──██║────██║──║
// ║──██║─────────╚███████╔╝──██║────██║──╚████╔████╔╝──██║────██║──██║────██║──████████╔╝──║
// ║──╚═╝──────────╚══════╝───╚═╝────╚═╝───╚═══╝╚═══╝───╚═╝────╚═╝──╚═╝────╚═╝──╚═══════╝───║
// ╚════════════════════════════════════════════════════════════════════════════════════════╝
//
// Authors: <NAME> (<EMAIL>)
// Yzx (<EMAIL>)
// <NAME> (<EMAIL>)
// <NAME> (<EMAIL>)
#pragma once
#include <string>
#include <vector>
#include <opencv2/opencv.hpp>
class Image {
public:
Image() = default;
/**
* Reads the image contents from a file.
* \param image_path path to the image file
*/
explicit Image(const std::string& image_path);
/**
* \brief Constructs an image from raw data.
* \param data pointer to uint8 RGB data
* \param height image height
* \param width image width
* \param channels number of image channels
*/
Image(const uchar* data, int height, int width, int channels = 3);
/**
* \brief Gets the shape of the image.
* \return a 3-element vector representing (height, width, channels)
*/
std::vector<int64_t> Shape() const;
/**
* \brief Gets the image data pointer.
* \return the image data pointer; nullptr means the image is empty
*/
uchar* Data() const;
bool IsOk() const { return image_.data != nullptr; }
/**
* Resizes the image.
* \param width new width
* \param height new height
* \return this object
*/
Image& Resize(int width, int height);
/**
* \brief Converts the color mode; currently only RGB <-> BGR is supported.
* \param from_bgr true means BGR -> RGB, false the reverse; if the number
* of channels is not 3, this does nothing
*/
void ConvertColor(bool from_bgr);
/**
* Writes the image to a file.
* \param image_path output path for the image
* \return true on success
*/
bool Write(const std::string& image_path) const;
private:
/**
* \brief Selects the image type based on the number of channels.
* \param channels number of channels
* \return image type such as CV_8UC1 or CV_8UC3; returns -1 on failure
*/
static int SelectType(int channels);
cv::Mat image_;
};
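// Minimal usage sketch (illustrative; the file names are assumptions):
//   Image img("input.png");
//   if (img.IsOk()) {
//     img.Resize(256, 256);          // scale to 256x256
//     img.ConvertColor(false);       // RGB -> BGR (false = not from BGR)
//     img.Write("output.png");
//   }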
| 1,589 |
5,169 | <filename>Specs/DKUIViewExtensions/0.0.1/DKUIViewExtensions.podspec.json
{
"name": "DKUIViewExtensions",
"version": "0.0.1",
"license": "MIT",
"summary": "An Animate Water view on iOS.",
"homepage": "https://github.com/DK-L-iOS/DKUIViewExtensions",
"authors": {
"DK-Li": "<EMAIL>"
},
"source": {
"git": "https://github.com/DK-L-iOS/DKUIViewExtensions.git",
"tag": "0.0.1"
},
"requires_arc": true,
"platforms": {
"ios": "7.0"
},
"source_files": "DKUIViewExtensions/*.{h,m}",
"dependencies": {
"MBProgressHUD": [
"~> 0.9.2"
]
}
}
| 275 |
460 | <filename>trunk/win/Source/Includes/QtIncludes/src/xmlpatterns/type/qnumerictype_p.h
/****************************************************************************
**
** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation (<EMAIL>)
**
** This file is part of the QtXmlPatterns module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** GNU Lesser General Public License Usage
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this
** file. Please review the following information to ensure the GNU Lesser
** General Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU General
** Public License version 3.0 as published by the Free Software Foundation
** and appearing in the file LICENSE.GPL included in the packaging of this
** file. Please review the following information to ensure the GNU General
** Public License version 3.0 requirements will be met:
** http://www.gnu.org/copyleft/gpl.html.
**
** Other Usage
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
//
// W A R N I N G
// -------------
//
// This file is not part of the Qt API. It exists purely as an
// implementation detail. This header file may change from version to
// version without notice, or even be removed.
//
// We mean it.
#ifndef Patternist_NumericType_H
#define Patternist_NumericType_H
#include "qatomictype_p.h"
QT_BEGIN_HEADER
QT_BEGIN_NAMESPACE
namespace QPatternist
{
/**
* @short Represents the internal and abstract type @c fs:numeric.
*
* @see <a href="http://www.w3.org/TR/xquery-semantics/#dt-fs_numeric">XQuery 1.0
* and XPath 2.0 Formal Semantics, Definition: fs:numeric</a>
* @ingroup Patternist_types
* @author <NAME> <<EMAIL>>
*/
class NumericType : public AtomicType
{
public:
virtual ~NumericType();
virtual bool itemMatches(const Item &item) const;
virtual bool xdtTypeMatches(const ItemType::Ptr &other) const;
/**
* @returns always "numeric". That is, no namespace prefix
*/
virtual QString displayName(const NamePool::Ptr &np) const;
/**
* @returns always @c true
*/
virtual bool isAbstract() const;
/**
* @returns always @c false
*/
virtual bool isNodeType() const;
/**
* @returns always @c true
*/
virtual bool isAtomicType() const;
/**
* @returns always xs:anyAtomicType
*/
virtual SchemaType::Ptr wxsSuperType() const;
/**
* @returns always xs:anyAtomicType
*/
virtual ItemType::Ptr xdtSuperType() const;
/**
* @returns @c null. It makes no sense to atomize the abstract type @c fs:numeric.
*/
virtual ItemType::Ptr atomizedType() const;
/**
* NumericType cannot be visited. This function is only implemented
* to satisfy the abstract super class's interface.
*
* @returns always a @c null pointer
*/
virtual AtomicTypeVisitorResult::Ptr accept(const AtomicTypeVisitor::Ptr &visitor,
const SourceLocationReflection *const) const;
/**
* NumericType cannot be visited. This function is only implemented
* to satisfy the abstract super class's interface.
*
* @returns always a @c null pointer
*/
virtual AtomicTypeVisitorResult::Ptr accept(const ParameterizedAtomicTypeVisitor::Ptr &visitor,
const qint16 op,
const SourceLocationReflection *const) const;
/**
* The type @c fs:numeric is an abstract type which therefore
* cannot be involved in comparisons. Hence, this function returns
* @c null. This function is only implemented to satisfy the abstract
* super class's interface.
*
* @returns always a @c null pointer
*/
virtual AtomicComparatorLocator::Ptr comparatorLocator() const;
/**
* The type @c fs:numeric is an abstract type which therefore
* cannot be involved in arithmetics. Hence, this function returns
* @c null. This function is only implemented to satisfy the abstract
* super class's interface.
*
* @returns always a @c null pointer
*/
virtual AtomicMathematicianLocator::Ptr mathematicianLocator() const;
/**
* The type @c fs:numeric is an abstract type which therefore
* cannot be involved in casting. Hence, this function returns
* @c null. This function is only implemented to satisfy the abstract
* super class's interface.
*
* @returns always a @c null pointer
*/
virtual AtomicCasterLocator::Ptr casterLocator() const;
protected:
friend class BuiltinTypes;
NumericType();
};
}
QT_END_NAMESPACE
QT_END_HEADER
#endif
| 2,364 |
679 | <gh_stars>100-1000
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
#include <cppuhelper/implbase1.hxx>
#include <com/sun/star/xml/sax/XAttributeList.hpp>
#include <com/sun/star/xml/sax/SAXException.hpp>
#include <com/sun/star/xml/sax/XDocumentHandler.hpp>
#include <com/sun/star/xml/sax/XExtendedDocumentHandler.hpp>
#include <com/sun/star/xml/sax/XParser.hpp>
#include <com/sun/star/lang/NoSupportException.hpp>
#include <com/sun/star/lang/XMultiComponentFactory.hpp>
namespace css = ::com::sun::star;
namespace dp_registry
{
namespace backend
{
namespace sfwk
{
typedef ::cppu::WeakImplHelper1< css::xml::sax::XDocumentHandler > t_DocHandlerImpl;
class ParcelDescDocHandler : public t_DocHandlerImpl
{
private:
bool m_bIsParsed;
::rtl::OUString m_sLang;
sal_Int32 skipIndex;
public:
ParcelDescDocHandler():m_bIsParsed( false ), skipIndex( 0 ){}
::rtl::OUString getParcelLanguage() { return m_sLang; }
bool isParsed() { return m_bIsParsed; }
// XDocumentHandler
virtual void SAL_CALL startDocument()
throw ( css::xml::sax::SAXException, css::uno::RuntimeException );
virtual void SAL_CALL endDocument()
throw ( css::xml::sax::SAXException, css::uno::RuntimeException );
virtual void SAL_CALL startElement( const ::rtl::OUString& aName,
const css::uno::Reference< css::xml::sax::XAttributeList > & xAttribs )
throw ( css::xml::sax::SAXException,
css::uno::RuntimeException );
virtual void SAL_CALL endElement( const ::rtl::OUString & aName )
throw ( css::xml::sax::SAXException, css::uno::RuntimeException );
virtual void SAL_CALL characters( const ::rtl::OUString & aChars )
throw ( css::xml::sax::SAXException, css::uno::RuntimeException );
virtual void SAL_CALL ignorableWhitespace( const ::rtl::OUString & aWhitespaces )
throw ( css::xml::sax::SAXException, css::uno::RuntimeException );
virtual void SAL_CALL processingInstruction(
const ::rtl::OUString & aTarget, const ::rtl::OUString & aData )
throw ( css::xml::sax::SAXException, css::uno::RuntimeException );
virtual void SAL_CALL setDocumentLocator(
const css::uno::Reference< css::xml::sax::XLocator >& xLocator )
throw ( css::xml::sax::SAXException, css::uno::RuntimeException );
};
}
}
}
| 1,116 |
9,095 | <reponame>lorentzenchr/scipy<gh_stars>1000+
"Iterative Solvers for Sparse Linear Systems"
#from info import __doc__
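# Example usage (illustrative):
#   >>> import numpy as np
#   >>> from scipy.sparse import csc_matrix
#   >>> from scipy.sparse.linalg import cg
#   >>> A = csc_matrix([[4.0, 1.0], [1.0, 3.0]])
#   >>> x, info = cg(A, np.array([1.0, 2.0]))  # info == 0 means convergence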
from .iterative import *
from .minres import minres
from .lgmres import lgmres
from .lsqr import lsqr
from .lsmr import lsmr
from ._gcrotmk import gcrotmk
from .tfqmr import tfqmr
__all__ = [
'bicg', 'bicgstab', 'cg', 'cgs', 'gcrotmk', 'gmres',
'lgmres', 'lsmr', 'lsqr',
'minres', 'qmr', 'tfqmr'
]
from scipy._lib._testutils import PytestTester
test = PytestTester(__name__)
del PytestTester
| 220 |
440 | /*========================== begin_copyright_notice ============================
Copyright (C) 2017-2021 Intel Corporation
SPDX-License-Identifier: MIT
============================= end_copyright_notice ===========================*/
//
/// GenXCategory
/// ------------
///
/// This pass performs five functions:
///
/// 1. It splits any struct phi into a phi for each element of the struct. This
/// is done in GenXLowering, but a subsequent pass can re-insert a struct phi so
/// this pass mops those up.
///
/// 2. It resolves each overlapping circular phi value.
///
/// LLVM IR does not attach
/// any importance to the order of phi nodes in any particular basic block.
/// At the head of a loop, a phi incoming can also be a phi definition in the
/// same block, and they could be in either order.
///
/// However, once we start constructing live ranges in the GenX backend, we
/// attach importance to the order of the phi nodes, so we need to resolve
/// any such overlapping circular phi value. Currently we do this by
/// inserting a copy (actually a bitcast) just after the phi nodes in that
/// basic block. A future enhancement would be to try and re-order the phi
/// nodes, and only fall back to copy insertion if there is circularity and
/// it is impossible to find a correct order, for example when the loop body
/// swaps two variables over.
///
/// 3. It inserts a load for any operand that is constant but not allowed to be.
/// It also catches any case where constant propagation in EarlyCSE has
/// caused a non-simple constant to be propagated into the instruction.
/// See the GenXConstants section above.
// (in GenXConstants.cpp)
///
/// 4. It determines the register category and increased alignment requirement
/// (e.g. use as a raw operand) of each value, and stores it by creating a
/// LiveRange for the value and storing it there. At this stage the LiveRange
/// does not contain any other information; GenXLiveRanges populates it further
/// (or erases it if the value turns out to be baled in).
///
/// 5. It inserts instructions as required to convert from one register
/// category to another, where a value has its def and uses not all requiring
/// the same category.
///
/// All this pass inserts is a llvm.genx.convert intrinsic. It does not record
/// what the categories are. This information is recalculated in GenXLiveness.
///
/// The reason for inserting the convert intrinsic calls here, before the final
/// run of GenXBaling before GenXLiveRanges, is that we want GenXBaling to spot
/// when a convert intrinsic can be baled with rdregion or wrregion.
///
/// For one value (function argument or instruction), the pass looks at the
/// categories required for the definition and each use. If there is no address
/// conversion involved, then it inserts a single conversion if possible (all
/// uses are the same category), otherwise it inserts a conversion for each use
/// that requires one.
///
/// **IR restriction**: After this pass, a value must have its def and all uses
/// requiring the same register category.
///
/// Address conversion
/// ^^^^^^^^^^^^^^^^^^
///
/// An address conversion is treated slightly differently.
///
/// A rdregion/wrregion representing an indirect region has a variable index.
/// This index is a plain integer value, whereas the vISA we need to generate for
/// it uses an address register that has been set up with an ``add_addr``
/// instruction from the index and the base register.
///
/// This pass inserts an ``llvm.genx.convert.addr`` intrinsic, with zero offset,
/// to represent the conversion from index to address register. However, the
/// intrinsic has no way of representing the base register. Instead, the base
/// register is implicitly the "old value" input of the rdregion/wrregion where
/// the address is used.
///
/// The same index may well be used in multiple rdregions and wrregions,
/// especially after LLVM's CSE. But at this stage we have no idea whether
/// these multiple rdregions/wrregions will have the same base register, so
/// we must assume not and insert a separate ``llvm.genx.convert.addr``
/// for each rdregion/wrregion use of the index.
///
/// These multiple address conversions of the same index are commoned up
/// where possible later on in GenXAddressCommoning. That pass runs after
/// GenXCoalescing, so it can tell whether two address conversions of the
/// same index also have the same base register because the "old value"
/// inputs of the regions have been coalesced together.
///
/// Where an index used in an indirect region is a constant add, this pass
/// inserts the ``llvm.genx.convert.addr`` before that, and turns the constant
/// add into ``llvm.genx.add.addr``. The latter can be baled into rdregion
/// or wrregion, representing a constant offset in the indirect region.
/// Only one ``llvm.genx.add.addr`` is allowed between the
/// ``llvm.genx.convert.addr`` and the use in a rdregion/wrregion.
///
/// However this pass does not check whether the offset is in range (although
/// GenXBaling does check that before deciding to bale it in). The
/// GenXAddressCommoning pass sorts that out.
///
/// **IR restriction**: After this pass, a variable index in a rdregion/wrregion
/// must be the result of ``llvm.genx.convert.addr`` or ``llvm.genx.add.addr``.
/// Operand 0 of ``llvm.genx.add.addr`` must be the result of
/// ``llvm.genx.convert.addr``.
///
/// **IR restriction**: After this pass, up to GenXAddressCommoning, the result
/// of ``llvm.genx.convert.addr`` must have a single use in either a
/// ``llvm.genx.add.addr`` or as the index in rdregion/wrregion. The result
/// of ``llvm.genx.add.addr`` must have a single use as the index in
/// rdregion/wrregion.
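///
/// For example, the conversion of a constant-offset index might look like
/// this (illustrative IR shape, not verbatim compiler output):
///
///   %idx.add = add i16 %idx, 32
///   ... rdregion(..., i16 %idx.add, ...)
///
/// becomes
///
///   %addr = llvm.genx.convert.addr(%idx)        ; zero offset
///   %addr.add = llvm.genx.add.addr(%addr, 32)   ; baled into the region
///   ... rdregion(..., i16 %addr.add, ...)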
///
//===----------------------------------------------------------------------===//
#define DEBUG_TYPE "GENX_CATEGORY"
#include "FunctionGroup.h"
#include "GenX.h"
#include "GenXConstants.h"
#include "GenXIntrinsics.h"
#include "GenXLiveness.h"
#include "GenXModule.h"
#include "GenXRegion.h"
#include "GenXTargetMachine.h"
#include "GenXUtil.h"
#include "vc/GenXOpts/Utils/KernelInfo.h"
#include "vc/GenXOpts/Utils/RegCategory.h"
#include "Probe/Assertion.h"
#include "llvmWrapper/IR/DerivedTypes.h"
#include "llvmWrapper/IR/InstrTypes.h"
#include "llvmWrapper/IR/Instructions.h"
#include <llvm/ADT/PostOrderIterator.h>
#include <llvm/Analysis/CFG.h>
#include <llvm/Analysis/ValueTracking.h>
#include <llvm/CodeGen/TargetPassConfig.h>
#include <llvm/GenXIntrinsics/GenXIntrinsics.h>
#include <llvm/IR/BasicBlock.h>
#include <llvm/IR/Constants.h>
#include <llvm/IR/Dominators.h>
#include <llvm/IR/Function.h>
#include <llvm/IR/InlineAsm.h>
#include <llvm/IR/Instructions.h>
#include <llvm/IR/Intrinsics.h>
#include <llvm/IR/Metadata.h>
#include <llvm/Support/Debug.h>
using namespace llvm;
using namespace genx;
namespace {
// CategoryAndAlignment : values returned from getCategoryAndAlignment*
// functions
struct CategoryAndAlignment {
unsigned Cat;
unsigned Align;
CategoryAndAlignment(unsigned Cat, unsigned Align = 0) : Cat(Cat), Align(Align) {}
};
class UsesCatInfo;
// GenX category pass
class GenXCategory : public FunctionGroupPass {
Function *Func = nullptr;
KernelMetadata KM;
GenXLiveness *Liveness = nullptr;
DominatorTreeGroupWrapperPass *DTs = nullptr;
const GenXSubtarget *Subtarget = nullptr;
const DataLayout *DL = nullptr;
SmallVector<Instruction *, 8> ToErase;
bool Modified = false;
// Vector of arguments and phi nodes that did not get a category.
SmallVector<Value *, 8> NoCategory;
bool InFGHead = false;
// Sometimes the pass may get stuck on strongly connected components.
// This field identifies such a case and signals that there is no need
// to wait until some other value's category is defined.
bool EnforceCategoryPromotion = false;
public:
static char ID;
explicit GenXCategory() : FunctionGroupPass(ID) { }
StringRef getPassName() const override {
return "GenX category conversion";
}
void getAnalysisUsage(AnalysisUsage &AU) const override;
bool runOnFunctionGroup(FunctionGroup &FG) override;
// createPrinterPass : get a pass to print the IR, together with the GenX
// specific analyses
Pass *createPrinterPass(raw_ostream &O,
const std::string &Banner) const override {
return createGenXGroupPrinterPass(O, Banner);
}
unsigned getCategoryForPhiIncomings(PHINode *Phi) const;
unsigned getCategoryForCallArg(Function *Callee, unsigned ArgNo) const;
unsigned getCategoryForInlasmConstraintedOp(CallInst *CI, unsigned ArgNo,
bool IsOutput) const;
CategoryAndAlignment getCategoryAndAlignmentForDef(Value *V) const;
CategoryAndAlignment getCategoryAndAlignmentForUse(Value::use_iterator U) const;
private:
using ConvListT = std::array<llvm::Instruction *, RegCategory::NUMCATEGORIES>;
bool processFunction(Function *F);
bool fixCircularPhis(Function *F);
bool processValue(Value *V);
bool handleLeftover();
Instruction *createConversion(Value *V, unsigned Cat);
ConvListT buildConversions(Value *Def, CategoryAndAlignment DefInfo, const UsesCatInfo &UsesInfo);
};
// AUse : an address use of a value in processValue()
struct AUse {
Instruction *user;
unsigned OperandNum;
unsigned Cat;
AUse(Value::use_iterator U, unsigned Cat)
: user(cast<Instruction>(U->getUser())),
OperandNum(U->getOperandNo()), Cat(Cat) {}
};
// Almost a real input iterator; only the minimum needed for range-for is implemented.
class Iterator final {
unsigned ShiftedMask_;
unsigned CurCat_;
public:
Iterator(unsigned Mask, unsigned Cat) : ShiftedMask_(Mask), CurCat_(Cat) {
validate();
}
unsigned operator*() const {
validate();
return CurCat_;
}
Iterator &operator++() {
validate();
ShiftedMask_ /= 2;
++CurCat_;
if (ShiftedMask_ == 0) {
CurCat_ = RegCategory::NUMCATEGORIES;
validate();
return *this;
}
for (; ShiftedMask_ % 2 == 0; ShiftedMask_ /= 2, ++CurCat_)
;
validate();
return *this;
}
friend bool operator==(const Iterator &lhs, const Iterator &rhs) {
return (lhs.ShiftedMask_ == rhs.ShiftedMask_ &&
lhs.CurCat_ == rhs.CurCat_);
}
friend bool operator!=(const Iterator &lhs, const Iterator &rhs) {
return !(lhs == rhs);
}
private:
void validate() const {
IGC_ASSERT_MESSAGE((ShiftedMask_ % 2 == 1 || CurCat_ == RegCategory::NUMCATEGORIES),
"invalid state");
}
};
// Implements only begin() and end()
// to iterate over categories of uses.
class Categories final {
unsigned Mask_;
public:
explicit Categories(unsigned Mask) : Mask_(Mask) {}
Iterator begin() const {
// we have no category
if (!Mask_)
return end();
// we have NONE category
if (Mask_ % 2 == 1)
return Iterator(Mask_, 0);
// pretend the NONE category is present so the iterator starts in a valid state
Iterator FalseBegin(Mask_ + 1, 0);
// and now we get the real first category
return ++FalseBegin;
}
Iterator end() const { return Iterator(0, RegCategory::NUMCATEGORIES); }
};
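// Illustrative: with Mask = 0b0101, "for (auto Cat : Categories(Mask))"
// visits category 0 (NONE) and then category 2.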
// Encapsulates Category'n'Alignment analysis of value uses.
class UsesCatInfo final {
using UsesT = llvm::SmallVector<AUse, 8>;
UsesT Uses_;
unsigned Mask_;
unsigned MaxAlign_;
unsigned MostUsedCat_;
public:
UsesCatInfo() : Uses_(), Mask_(0), MaxAlign_(0) {}
UsesCatInfo(const GenXCategory &PassInfo, Value *V) : UsesCatInfo() {
std::array<int, RegCategory::NUMCATEGORIES> Stat = {0};
for (auto ui = V->use_begin(), ue = V->use_end(); ui != ue; ++ui) {
auto CatAlign = PassInfo.getCategoryAndAlignmentForUse(ui);
MaxAlign_ = std::max(MaxAlign_, CatAlign.Align);
Uses_.push_back(AUse(ui, CatAlign.Cat));
Mask_ |= 1 << CatAlign.Cat;
if (CatAlign.Cat != RegCategory::NONE)
++Stat[CatAlign.Cat];
}
auto MaxInStatIt = std::max_element(Stat.begin(), Stat.end());
MostUsedCat_ = MaxInStatIt - Stat.begin();
}
bool empty() const { return !Mask_; }
bool allHaveCat(unsigned cat) const { return !(Mask_ & ~(1 << cat)); }
const UsesT &getUses() const { return Uses_; }
unsigned getMaxAlign() const { return MaxAlign_; }
// When there are no uses with a real category (real being anything but
// NONE), the behavior is undefined.
unsigned getMostUsedCat() const {
IGC_ASSERT_MESSAGE(!empty(),
"works only for cases when there are uses with real categories");
IGC_ASSERT_MESSAGE(!allHaveCat(RegCategory::NONE),
"works only for cases when there are uses with real categories");
return MostUsedCat_;
}
// meant to be used in range for
Categories getCategories() const { return Categories(Mask_); }
};
void placeConvAfterDef(Function *Func, Instruction *Conv, Value *Def) {
if (Instruction *Inst = dyn_cast<Instruction>(Def)) {
// Original value is an instruction. Insert just after it.
Conv->insertAfter(Inst);
Conv->setDebugLoc(Inst->getDebugLoc());
} else {
IGC_ASSERT_MESSAGE(isa<Argument>(Def), "must be an argument if not an instruction");
// Original value is a function argument. Insert at the start of the
// function.
Conv->insertBefore(&*Func->begin()->begin());
}
}
void placeConvBeforeUse(Instruction *Conv, Instruction *Use,
unsigned UseOperand) {
if (auto PhiUse = dyn_cast<PHINode>(Use)) {
// Use is in a phi node. Insert before terminator in corresponding
// incoming block.
Conv->insertBefore(PhiUse->getIncomingBlock(UseOperand)->getTerminator());
} else {
// Insert just before use.
Conv->insertBefore(Use);
Conv->setDebugLoc(Use->getDebugLoc());
}
}
} // end anonymous namespace
char GenXCategory::ID = 0;
namespace llvm { void initializeGenXCategoryPass(PassRegistry &); }
INITIALIZE_PASS_BEGIN(GenXCategory, "GenXCategory", "GenXCategory", false, false)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeGroupWrapperPass)
INITIALIZE_PASS_DEPENDENCY(GenXLiveness)
INITIALIZE_PASS_END(GenXCategory, "GenXCategory", "GenXCategory", false, false)
FunctionGroupPass *llvm::createGenXCategoryPass()
{
initializeGenXCategoryPass(*PassRegistry::getPassRegistry());
return new GenXCategory();
}
void GenXCategory::getAnalysisUsage(AnalysisUsage &AU) const
{
FunctionGroupPass::getAnalysisUsage(AU);
AU.addRequired<DominatorTreeGroupWrapperPass>();
AU.addRequired<GenXLiveness>();
AU.addRequired<TargetPassConfig>();
AU.addPreserved<GenXModule>();
AU.addPreserved<GenXLiveness>();
AU.addPreserved<FunctionGroupAnalysis>();
AU.addPreserved<DominatorTreeGroupWrapperPass>();
AU.setPreservesCFG();
}
/***********************************************************************
* runOnFunctionGroup : run the category conversion pass for
* this FunctionGroup
*/
bool GenXCategory::runOnFunctionGroup(FunctionGroup &FG)
{
KM = KernelMetadata(FG.getHead());
DTs = &getAnalysis<DominatorTreeGroupWrapperPass>();
Liveness = &getAnalysis<GenXLiveness>();
Subtarget = &getAnalysis<TargetPassConfig>()
.getTM<GenXTargetMachine>()
.getGenXSubtarget();
DL = &FG.getModule()->getDataLayout();
EnforceCategoryPromotion = false;
bool Modified = false;
if (KM.isKernel()) {
// Get the offset of each kernel arg.
for (auto ai = FG.getHead()->arg_begin(), ae = FG.getHead()->arg_end();
ai != ae; ++ai) {
Argument *Arg = &*ai;
Liveness->getOrCreateLiveRange(Arg)->Offset = KM.getArgOffset(Arg->getArgNo());
}
}
// Mop up any struct phis, splitting into elements.
for (auto i = FG.begin(), e = FG.end(); i != e; ++i)
Modified |= splitStructPhis(*i);
// Do category conversion on each function in the group.
InFGHead = true;
for (auto i = FG.begin(), e = FG.end(); i != e; ++i) {
Modified |= processFunction(*i);
InFGHead = false;
}
Modified |= handleLeftover();
return Modified;
}
// Now iteratively process values that did not get a category. A valid
// category will eventually propagate through a web of phi nodes
// and/or subroutine args.
bool GenXCategory::handleLeftover() {
if (NoCategory.empty())
return false;
while (!NoCategory.empty()) {
auto NewEnd = std::remove_if(NoCategory.begin(), NoCategory.end(),
[this](Value *V) { return processValue(V); });
if (NewEnd == NoCategory.end()) {
IGC_ASSERT_MESSAGE(!EnforceCategoryPromotion,
"category promotion was enforced, still no progress");
EnforceCategoryPromotion = true;
continue;
}
// No need to enforce category promotion when there is progress. Even if
// it was enforced before.
EnforceCategoryPromotion = false;
NoCategory.erase(NewEnd, NoCategory.end());
}
return true;
}
// Common up constpred calls within a block.
static bool commonUpPredicate(BasicBlock *BB) {
bool Changed = false;
// Map from flatten predicate value to its constpred calls.
using key_type = std::pair<char, uint64_t>;
SmallDenseMap<key_type, SmallVector<Instruction *, 8>> ValMap;
for (auto &Inst : BB->getInstList()) {
if (GenXIntrinsic::getGenXIntrinsicID(&Inst) == GenXIntrinsic::genx_constantpred) {
Constant *V = cast<Constant>(Inst.getOperand(0));
if (auto *VT = dyn_cast<IGCLLVM::FixedVectorType>(V->getType())) {
unsigned NElts = VT->getNumElements();
if (NElts > 64)
continue;
uint64_t Bits = 0;
for (unsigned i = 0; i != NElts; ++i)
if (!V->getAggregateElement(i)->isNullValue())
Bits |= ((uint64_t)1 << i);
key_type Key{NElts, Bits};
auto Iter = ValMap.find(Key);
if (Iter == ValMap.end())
ValMap[Key].push_back(&Inst);
else if (Inst.hasOneUse() && Inst.user_back()->getParent() == BB)
// Just in case constpred is not from constant predicate loading. This
// ensures the first instruction dominates others in the same vector.
(Iter->second).push_back(&Inst);
}
}
}
// Common up when there are more than 2 uses, in which case it will not be
// worse than flag spills.
for (auto I = ValMap.begin(), E = ValMap.end(); I != E; ++I) {
auto &V = I->second;
int n = (int)V.size();
if (n > 2) {
Instruction *DomInst = V.front();
for (int i = 1; i < n; ++i) {
V[i]->replaceAllUsesWith(DomInst);
V[i]->eraseFromParent();
}
Changed = true;
}
}
return Changed;
}
/***********************************************************************
* processFunction : run the category conversion pass for this Function
*
* This does a postordered depth first traversal of the CFG,
* processing instructions within a basic block in reverse, to
* ensure that we see a def after its uses (ignoring phi node uses).
* This is specifically useful for an address conversion, where we want to
* see the constant add used in an indirect region (and convert it into a
* llvm.genx.add.addr) before we see the instruction it uses.
*/
bool GenXCategory::processFunction(Function *F)
{
Func = F;
// Before doing the category conversion, fix circular phis.
Modified = fixCircularPhis(F);
// Load constants in phi nodes.
loadPhiConstants(*F, DTs->getDomTree(F), *Subtarget, *DL, false);
// Process all instructions.
for (po_iterator<BasicBlock *> i = po_begin(&Func->getEntryBlock()),
e = po_end(&Func->getEntryBlock()); i != e; ++i) {
// This loop scans the basic block backwards. If any code is inserted
// before the current point, that code is scanned too.
BasicBlock *BB = *i;
for (Instruction *Inst = &BB->back(); Inst;
Inst = (Inst == &BB->front() ? nullptr : Inst->getPrevNode())) {
Modified |= loadNonSimpleConstants(Inst, *Subtarget, *DL, nullptr);
Modified |= loadConstants(Inst, *Subtarget, *DL);
if (!processValue(Inst))
NoCategory.push_back(Inst);
}
// This commons up constpred calls just loaded.
Modified |= commonUpPredicate(BB);
// Erase instructions (and their live ranges) as requested by processValue.
for (unsigned i = 0, e = ToErase.size(); i != e; ++i) {
Liveness->eraseLiveRange(ToErase[i]);
ToErase[i]->eraseFromParent();
}
ToErase.clear();
}
// Process all args.
for (auto fi = Func->arg_begin(), fe = Func->arg_end(); fi != fe; ++fi) {
Value *V = &*fi;
if (!processValue(V))
NoCategory.push_back(V);
}
return Modified;
}
/***********************************************************************
* fixCircularPhis : fix up overlapping circular phi nodes
*
* A phi node at the head of a loop can have a use in the phi nodes in the same
* basic block. If the use is after the def, it still refers to the value in
* the previous loop iteration, but the GenX backend cannot cope with the
* live range going round the loop and overlapping with its own start.
*
* This function spots any such phi node and works around it by inserting an
* extra copy (bitcast) just after the phi nodes in the basic block.
*
* A better solution for the future would be to re-order the phi nodes if
* possible, and only fall back to inserting a copy if there is circularity
* (e.g. a loop that swaps two variables in its body).
*/
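// Illustrative IR shape (assumed) that needs the workaround: a loop body
// that swaps two values via phis in the same block,
//   %a = phi i32 [ %a0, %pre ], [ %b, %loop ]
//   %b = phi i32 [ %b0, %pre ], [ %a, %loop ]
// Each phi's live range wraps around the loop and overlaps its own start,
// so a copy (bitcast) is inserted after the phis to break the overlap.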
bool GenXCategory::fixCircularPhis(Function *F)
{
bool Modified = false;
for (auto fi = Func->begin(), fe = Func->end(); fi != fe; ++fi) {
BasicBlock *BB = &*fi;
// Process phi nodes in one basic block.
for (auto bi = BB->begin(); ; ++bi) {
auto Phi = dyn_cast<PHINode>(&*bi);
if (!Phi)
break; // end of phi nodes
if (!GenXLiveness::wrapsAround(Phi, Phi))
continue;
// Overlapping circular phi node. Insert a copy.
// Note that the copy has to be split in the same way as a copy
// inserted in GenXCoalescing when coalescing fails, but we have
// our own code here because at this point we do not have any real
// and possibly coalesced live ranges like GenXCoalescing does.
Modified = true;
SmallVector<Use *, 8> Uses;
for (auto ui = Phi->use_begin(), ue = Phi->use_end(); ui != ue; ++ui)
Uses.push_back(&*ui);
// A phi node is never a struct -- GenXLowering removed struct phis.
IGC_ASSERT(!isa<StructType>(Phi->getType()));
// Insert a copy, split as required to be legal.
auto NewCopy =
Liveness->insertCopy(Phi, nullptr, BB->getFirstNonPHI(),
Phi->getName() + ".unoverlapper", 0, Subtarget);
// Change the uses that existed before we added the copy to use the
// copy instead.
for (auto ui = Uses.begin(), ue = Uses.end(); ui != ue; ++ui)
**ui = NewCopy;
}
}
return Modified;
}
/***********************************************************************
* processValue : category conversion for one value
*
* Return: whether category successfully chosen
*
* This returns false only for a function argument or a phi node where all
* uses are in phi nodes which themselves do not have a category yet.
*/
bool GenXCategory::processValue(Value *V)
{
// Check for special cases.
// Ignore void.
if (V->getType()->isVoidTy())
return true;
// Ignore i1 or vector of i1. Predicates do not use category
// conversion.
if (V->getType()->getScalarType()->isIntegerTy(1))
return true;
// Elements of a struct always have default (general or predicate) category.
if (isa<StructType>(V->getType()))
return true;
auto DefInfo = getCategoryAndAlignmentForDef(V);
UsesCatInfo UsesInfo(*this, V);
// more corner cases
if (UsesInfo.empty()) {
// Value not used: set its category and then ignore it. If the definition
// did not give us a category (probably an unused function arg), then
// arbitrarily make it general.
if (DefInfo.Cat == RegCategory::NONE)
Liveness->getOrCreateLiveRange(V, RegCategory::GENERAL, DefInfo.Align);
else
Liveness->getOrCreateLiveRange(V, DefInfo.Cat, DefInfo.Align);
return true;
}
else if (UsesInfo.allHaveCat(RegCategory::NONE))
{
if (DefInfo.Cat == RegCategory::NONE) {
// The "no categories at all" case can only happen for a value that is
// defined by a function argument or a phi node and used only in phi
// nodes or subroutine call args.
IGC_ASSERT_MESSAGE((isa<Argument>(V) || isa<PHINode>(V)), "no register category");
return false;
}
// Value defined with a category but only used in phi nodes.
Liveness->getOrCreateLiveRange(V, DefInfo.Cat, DefInfo.Align);
return true;
}
// main case
if (DefInfo.Cat == RegCategory::NONE) {
// NONE means that we're free to choose the category
if (isa<PHINode>(V))
// currently we'd like to propagate general through phi
DefInfo.Cat = RegCategory::GENERAL;
else
DefInfo.Cat = UsesInfo.getMostUsedCat();
}
Liveness->getOrCreateLiveRange(V, DefInfo.Cat, std::max(DefInfo.Align, UsesInfo.getMaxAlign()));
auto Convs = buildConversions(V, DefInfo, UsesInfo);
for (auto UseInfo : UsesInfo.getUses()) {
if (UseInfo.Cat != DefInfo.Cat && UseInfo.Cat != RegCategory::NONE) {
Instruction *Conv;
if (UseInfo.Cat == RegCategory::ADDRESS) {
// Case of address category requires a separate conversion for each use, at least until we
// get to GenXAddressCommoning where we decide whether we can common some of them up.
Conv = createConversion(V, UseInfo.Cat);
placeConvBeforeUse(Conv, UseInfo.user, UseInfo.OperandNum);
Liveness->getOrCreateLiveRange(Conv)->setCategory(UseInfo.Cat);
}
else
Conv = Convs[UseInfo.Cat];
IGC_ASSERT_MESSAGE(Conv, "must have such conversion");
UseInfo.user->setOperand(UseInfo.OperandNum, Conv);
}
}
// If V is now unused (which happens if it is a constant add and all its
// uses were addresses), then remove it.
if (V->use_empty())
ToErase.push_back(cast<Instruction>(V));
return true;
}
/***********************************************************************
* createConversion : create call to llvm.genx.convert intrinsic to represent
* register category conversion
*
* The new instruction is not inserted anywhere yet.
*
* In the case that we are asked to convert a use of an add or constant sub
* to an address, we instead create an llvm.genx.add.addr of the input
* to the add/sub.
*/
Instruction *GenXCategory::createConversion(Value *V, unsigned Cat)
{
IGC_ASSERT_MESSAGE(V->getType()->getScalarType()->isIntegerTy(),
"createConversion expects int type");
if (Cat == RegCategory::ADDRESS) {
Value *Input = V;
int Offset = 0;
for (;;) {
// Check for use of add/sub that can be baled in to a region as a
// constant offset. This also handles a chain of two or more adds.
int ThisOffset;
if (!GenXBaling::getIndexAdd(Input, &ThisOffset) &&
!GenXBaling::getIndexOr(Input, ThisOffset))
break;
if (ThisOffset < G4_MIN_ADDR_IMM)
break;
Offset += ThisOffset;
Input = cast<Instruction>(Input)->getOperand(0);
}
if (Input != V) {
// Turn the add/sub into llvm.genx.add.addr. This could be out of range as
// a constant offset in an indirect operand at this stage;
// GenXAddressCommoning sorts that out by adjusting the constant offset in
// the llvm.genx.convert.addr.
return createAddAddr(Input, ConstantInt::get(V->getType(), Offset),
V->getName() + ".addradd", nullptr, Func->getParent());
}
}
// Normal conversion. If the source is an integer creation intrinsic
// and this isn't an address conversion, use the operand for that
// intrinsic call directly rather than using the result of the intrinsic.
// This helps the jitter to generate better code when surface constants
// are used in send instructions.
if (Cat != RegCategory::ADDRESS) {
if (GenXIntrinsic::getGenXIntrinsicID(V) == GenXIntrinsic::genx_constanti)
V = cast<CallInst>(V)->getArgOperand(0);
return createConvert(V, V->getName() + ".categoryconv", nullptr,
Func->getParent());
}
return createConvertAddr(V, 0, V->getName() + ".categoryconv", nullptr,
Func->getParent());
}
/***********************************************************************
* Creates conversion instructions, places them in the function (next to the
* def)
*
* Returns an array of created conversion (cons[Category] holds
* instruction if we need conversion to such Category and nullptr otherwise).
* Doesn't produce address category conversion.
*/
GenXCategory::ConvListT
GenXCategory::buildConversions(Value *Def, CategoryAndAlignment DefInfo,
const UsesCatInfo &UsesInfo) {
ConvListT Convs = {nullptr};
for (auto Cat : UsesInfo.getCategories()) {
// NONE doesn't require conversion; ADDRESS requires conversion before
// every use (not after def, so we won't create it here)
if (Cat != DefInfo.Cat && Cat != RegCategory::NONE &&
Cat != RegCategory::ADDRESS) {
auto Conv = createConversion(Def, Cat);
placeConvAfterDef(Func, Conv, Def);
Liveness->getOrCreateLiveRange(Conv)->setCategory(Cat);
Convs[Cat] = Conv;
}
}
return Convs;
}
/***********************************************************************
* intrinsicCategoryToRegCategory : convert intrinsic arg category to
* register category
*
* This converts a GenXIntrinsicInfo::* category, as returned by
* GenXIntrinsicInfo::ArgInfo::getCategory(), into a register category
* as stored in a live range.
*/
static unsigned intrinsicCategoryToRegCategory(unsigned ICat)
{
switch (ICat) {
case GenXIntrinsicInfo::ADDRESS:
return RegCategory::ADDRESS;
case GenXIntrinsicInfo::PREDICATION:
case GenXIntrinsicInfo::PREDICATE:
return RegCategory::PREDICATE;
case GenXIntrinsicInfo::SAMPLER:
return RegCategory::SAMPLER;
case GenXIntrinsicInfo::SURFACE:
return RegCategory::SURFACE;
default:
return RegCategory::GENERAL;
}
}
/***********************************************************************
* getCategoryAndAlignmentForDef : get register category and alignment for a def
*
* This returns RegCategory:: value, or RegCategory::NONE if no category
* is discernable.
*/
CategoryAndAlignment GenXCategory::getCategoryAndAlignmentForDef(Value *V) const
{
if (V->getType()->getScalarType()->getPrimitiveSizeInBits() == 1)
return RegCategory::PREDICATE;
if (Argument *Arg = dyn_cast<Argument>(V)) {
auto *F = Arg->getParent();
// This is a function Argument.
if (!InFGHead) {
// It is an arg in a subroutine. Get the category from the corresponding
// arg at some call site. (We should not have disagreement among the
// call sites and the function arg, since whichever one gets a category
// first forces the category of all the others.)
return getCategoryForCallArg(F, Arg->getArgNo());
}
unsigned ArgNo = Arg->getArgNo();
if (KM.getNumArgs() > ArgNo) {
// The function is a kernel, and has argument kind metadata for
// this argument. Determine the category from the kind.
return KM.getArgCategory(ArgNo);
}
// The function is not a kernel, or does not have the appropriate
// metadata. Set to no particular category, so the arg's uses will
// determine the category. This is the fallback for compatibility with
// hand coded LLVM IR from before this metadata was added. (If we only
// had to cope with non-kernel functions, we could just return GENERAL.)
// FIXME: temporary fix for stack calls. We need to figure out how to
// determine arguments category if it cannot be deduced from the arg uses.
// * calls from another function groups might help (but we do not have
// liveness -> category for them). What about standalone stack calls?
IGC_ASSERT(genx::requiresStackCall(F));
return getCategoryForCallArg(F, Arg->getArgNo());
}
// The def is a phi-instruction.
if (PHINode *Phi = dyn_cast<PHINode>(V)) {
// This is a phi node. Get the category from one of the incomings. (We
// should not have disagreement among the incomings, since whichever
// one gets a category first forces the category of all the others.)
return getCategoryForPhiIncomings(Phi);
}
// Multiple outputs of inline assembly instruction
// result in a structure and those elements are extracted
// with extractelement
if (ExtractValueInst *Extract = dyn_cast<ExtractValueInst>(V)) {
auto CI = dyn_cast<CallInst>(Extract->getAggregateOperand());
if (CI && CI->isInlineAsm())
return getCategoryForInlasmConstraintedOp(CI, Extract->getIndices()[0],
true /*IsOutput*/);
}
// The def is a call-inst
if (CallInst *CI = dyn_cast<CallInst>(V)) {
if (Function *Callee = CI->getCalledFunction()) {
unsigned IntrinsicID = GenXIntrinsic::getAnyIntrinsicID(Callee);
// We should not see genx_convert, as it is inserted into a value after
// using this function to determine its category.
IGC_ASSERT(IntrinsicID != GenXIntrinsic::genx_convert);
if (IntrinsicID == GenXIntrinsic::genx_convert_addr)
return RegCategory::ADDRESS;
if (GenXIntrinsic::isAnyNonTrivialIntrinsic(IntrinsicID) && !GenXIntrinsic::isRdRegion(IntrinsicID)
&& !GenXIntrinsic::isWrRegion(IntrinsicID) && !GenXIntrinsic::isAbs(IntrinsicID)) {
// For any normal intrinsic, look up the argument class.
GenXIntrinsicInfo II(IntrinsicID);
auto AI = II.getRetInfo();
return CategoryAndAlignment(
intrinsicCategoryToRegCategory(AI.getCategory()),
getLogAlignment(AI.getAlignment(), Subtarget
? Subtarget->getGRFByteSize()
: defaultGRFByteSize));
} else if (GenXIntrinsic::isRdRegion(IntrinsicID)) {
// Add this to avoid conversion in case of read-region on SurfaceIndex
// or SamplerIndex type
auto RC = getCategoryAndAlignmentForDef(
CI->getOperand(GenXIntrinsic::GenXRegion::OldValueOperandNum));
if (RC.Cat == RegCategory::SURFACE ||
RC.Cat == RegCategory::SAMPLER)
return RC.Cat;
}
} else if (CI->isInlineAsm()) {
return getCategoryForInlasmConstraintedOp(CI, 0, true /*IsOutput*/);
}
}
return RegCategory::GENERAL;
}
/***********************************************************************
* getCategoryForInlasmConstraintedOp : get register category for a
* operand of inline assembly (both for
* output and for input). Category of
* operand depends on its constraint.
*
*/
unsigned GenXCategory::getCategoryForInlasmConstraintedOp(CallInst *CI,
unsigned ArgNo,
bool IsOutput) const {
IGC_ASSERT_MESSAGE(CI->isInlineAsm(), "Inline asm expected");
InlineAsm *IA = dyn_cast<InlineAsm>(IGCLLVM::getCalledValue(CI));
IGC_ASSERT_MESSAGE(!IA->getConstraintString().empty(), "Here should be constraints");
auto ConstraintsInfo = genx::getGenXInlineAsmInfo(CI);
if (!IsOutput)
ArgNo += genx::getInlineAsmNumOutputs(CI);
auto Info = ConstraintsInfo[ArgNo];
switch (Info.getConstraintType()) {
default:
IGC_ASSERT_EXIT_MESSAGE(0, "unreachable while setting category in constraints");
case ConstraintType::Constraint_a:
case ConstraintType::Constraint_rw:
case ConstraintType::Constraint_r:
return RegCategory::GENERAL;
case ConstraintType::Constraint_n:
case ConstraintType::Constraint_i:
case ConstraintType::Constraint_F:
return RegCategory::NONE;
case ConstraintType::Constraint_cr:
return RegCategory::PREDICATE;
}
}
/***********************************************************************
* getCategoryAndAlignmentForUse : get register category for a use
*
* This returns RegCategory:: value, or RegCategory::NONE if no category
* is discernable.
*/
CategoryAndAlignment GenXCategory::getCategoryAndAlignmentForUse(
Value::use_iterator U) const
{
Value *V = U->get();
if (V->getType()->getScalarType()->isIntegerTy(1))
return RegCategory::PREDICATE;
auto user = cast<Instruction>(U->getUser());
if (PHINode *Phi = dyn_cast<PHINode>(user)) {
// This is a phi node. Get the category (if any) from the result, or from
// one of the incomings. (We should not have disagreement among the
// incomings, since whichever one gets a category first forces the category
// of all the others.)
if (auto LR = Liveness->getLiveRangeOrNull(Phi)) {
auto Cat = LR->getCategory();
if (Cat != RegCategory::NONE)
return Cat;
}
return getCategoryForPhiIncomings(Phi);
}
unsigned Category = RegCategory::GENERAL;
if (CallInst *CI = dyn_cast<CallInst>(user)) {
if (CI->isInlineAsm())
Category = getCategoryForInlasmConstraintedOp(CI, U->getOperandNo(),
false /*IsOutput*/);
else if (IGCLLVM::isIndirectCall(*CI))
Category = RegCategory::GENERAL;
else {
Function *Callee = CI->getCalledFunction();
unsigned IntrinID = GenXIntrinsic::not_any_intrinsic;
if (Callee)
IntrinID = GenXIntrinsic::getAnyIntrinsicID(Callee);
// We should not see genx_convert, as it is inserted into a value after
// using this function to determine its category.
IGC_ASSERT(IntrinID != GenXIntrinsic::genx_convert);
// For a read or write region or element intrisic, where the use we have
// is the address, mark as needing an address register.
switch (IntrinID) {
case GenXIntrinsic::not_any_intrinsic:
// Arg in subroutine call. Get the category from the function arg,
// or the arg at another call site. (We should not have disagreement
// among the call sites and the function arg, since whichever one
// gets a category first forces the category of all the others.)
Category = getCategoryForCallArg(Callee, U->getOperandNo());
break;
case GenXIntrinsic::genx_convert_addr:
Category = RegCategory::GENERAL;
break;
case GenXIntrinsic::genx_rdregioni:
case GenXIntrinsic::genx_rdregionf:
if (U->getOperandNo() == 4) // is addr-operand
Category = RegCategory::ADDRESS;
else if (GenXIntrinsic::GenXRegion::OldValueOperandNum == U->getOperandNo())
Category = RegCategory::NONE; // do not assign use-category
break;
case GenXIntrinsic::genx_wrregioni:
case GenXIntrinsic::genx_wrregionf:
if (U->getOperandNo() == 5) // is addr-operand
Category = RegCategory::ADDRESS;
break;
case GenXIntrinsic::genx_absf:
case GenXIntrinsic::genx_absi:
case GenXIntrinsic::genx_output:
case GenXIntrinsic::genx_output_1:
break;
default: {
// For any other intrinsic, look up the argument class.
GenXIntrinsicInfo II(IntrinID);
auto AI = II.getArgInfo(U->getOperandNo());
return CategoryAndAlignment(
intrinsicCategoryToRegCategory(AI.getCategory()),
getLogAlignment(AI.getAlignment(),
Subtarget ? Subtarget->getGRFByteSize()
: defaultGRFByteSize));
}
break;
}
}
}
return Category;
}
/***********************************************************************
* getCategoryForPhiIncomings : get register category from phi incomings
*
* Return: register category from a non-const incoming with a known category
* else NONE if at least one incoming is non-constant
* else GENERAL
*
* We will not have disagreement among the incomings, since whichever one gets
* a category first forces the category of all the others.
*/
unsigned GenXCategory::getCategoryForPhiIncomings(PHINode *Phi) const {
IGC_ASSERT_MESSAGE(!Phi->getType()->isIntOrIntVectorTy(1),
"pregicate phis should've been already considered");
if (llvm::all_of(Phi->incoming_values(),
[](const Use &Op) { return isa<Constant>(Op.get()); }))
// All incomings are constant. Arbitrarily make the phi node value
// general category.
return RegCategory::GENERAL;
auto IncomingWithCategory =
llvm::find_if(Phi->incoming_values(), [this](const Use &Op) {
auto *LR = Liveness->getLiveRangeOrNull(Op.get());
return LR && LR->getCategory() != RegCategory::NONE;
});
if (IncomingWithCategory != Phi->incoming_values().end()) {
auto PhiCategory =
Liveness->getLiveRange(IncomingWithCategory->get())->getCategory();
IGC_ASSERT_MESSAGE(
llvm::all_of(Phi->incoming_values(),
[this, PhiCategory](const Use &Op) {
auto *LR = Liveness->getLiveRangeOrNull(Op.get());
return !LR || LR->getCategory() == RegCategory::NONE ||
LR->getCategory() == PhiCategory;
}),
"Phi incoming values categories don't correspond");
return PhiCategory;
}
// If promotion is enforced, only one constant is enough to claim the phi
// to have general category.
if (EnforceCategoryPromotion &&
llvm::any_of(Phi->incoming_values(),
[](const Use &Op) { return isa<Constant>(Op.get()); }))
return RegCategory::GENERAL;
return RegCategory::NONE;
}
/***********************************************************************
* getCategoryForCallArg : get register category from subroutine arg or
* the corresponding arg at some call site
*
* Enter: Callee = function being called
* ArgNo = argument number
*
* Return: register category from subroutine arg or a call arg with a
* known category, else NONE if no category found
*
* We will not have disagreement among the subroutine arg and its corresponding
* call args, since whichever one gets a category first forces the category of
* all the others.
*/
unsigned GenXCategory::getCategoryForCallArg(Function *Callee, unsigned ArgNo) const
{
IGC_ASSERT(Callee);
// First try the subroutine arg.
  auto ai = Callee->arg_begin() + ArgNo;
if (auto LR = Liveness->getLiveRangeOrNull(&*ai)) {
unsigned Cat = LR->getCategory();
if (Cat != RegCategory::NONE)
return Cat;
}
// Then try the arg at each call site.
for (auto *U: Callee->users()) {
if (auto *CI = checkFunctionCall(U, Callee)) {
auto ArgV = CI->getArgOperand(ArgNo);
if (auto LR = Liveness->getLiveRangeOrNull(ArgV)) {
unsigned Cat = LR->getCategory();
if (Cat != RegCategory::NONE)
return Cat;
}
}
}
  // Special-case handling to break a deadlock: when all uses are undef, or a
  // stack call arg category cannot be deduced from the uses in the function,
  // force the argument to be GENERAL.
return EnforceCategoryPromotion ? RegCategory::GENERAL : RegCategory::NONE;
}
| 15,762 |
1,138 | """
Required Notice: Copyright (C) Zoomer Analytics GmbH.
xlwings PRO is dual-licensed under one of the following licenses:
* PolyForm Noncommercial License 1.0.0 (for noncommercial use):
https://polyformproject.org/licenses/noncommercial/1.0.0
* xlwings PRO License (for commercial use):
https://github.com/xlwings/xlwings/blob/main/LICENSE_PRO.txt
Commercial licenses can be purchased at https://www.xlwings.org
"""
import sys
import warnings
from ... import mistune
from ...conversion import Converter
class Style:
def __init__(self, display_name=None):
if display_name:
self.display_name = display_name
else:
self.display_name = ""
def __repr__(self):
s = ""
for attribute in vars(self):
if getattr(self, attribute) and attribute != "display_name":
s += f"{self.display_name}.{attribute}: {getattr(self, attribute)}\n"
return s.replace("\n\n", "\n")
class FontStyle(Style):
def __init__(
self,
display_name=None,
color=None,
size=None,
bold=None,
italic=None,
name=None,
):
super().__init__(display_name=display_name)
self.color = color
self.size = size
self.bold = bold
self.italic = italic
self.name = name
class MarkdownStyle:
"""
``MarkdownStyle`` defines how ``Markdown`` objects are being rendered in Excel cells
or shapes. Start by instantiating a ``MarkdownStyle`` object. Printing it will show
you the current (default) style:
>>> style = MarkdownStyle()
>>> style
<MarkdownStyle>
h1.font: .bold: True
h1.blank_lines_after: 1
paragraph.blank_lines_after: 1
unordered_list.bullet_character: •
unordered_list.blank_lines_after: 1
strong.bold: True
emphasis.italic: True
You can override the defaults, e.g., to make ``**strong**`` text red instead of
bold, do this:
>>> style.strong.bold = False
>>> style.strong.color = (255, 0, 0)
>>> style.strong
strong.color: (255, 0, 0)
.. versionadded:: 0.23.0
"""
class __Heading1(Style):
def __init__(self):
super().__init__(display_name="h1")
self.font = FontStyle(bold=True)
            self.blank_lines_after = 1
class __Paragraph(Style):
def __init__(self):
super().__init__(display_name="paragraph")
self.blank_lines_after = 1
class __UnorderedList(Style):
def __init__(self):
super().__init__(display_name="unordered_list")
self.bullet_character = "\u2022"
self.blank_lines_after = 1
def __init__(self):
self.h1 = self.__Heading1()
self.paragraph = self.__Paragraph()
self.unordered_list = self.__UnorderedList()
self.strong = FontStyle(display_name="strong", bold=True)
self.emphasis = FontStyle(display_name="emphasis", italic=True)
def __repr__(self):
s = "<MarkdownStyle>\n"
for attribute in vars(self):
s += f"{getattr(self, attribute)}"
return s
class Markdown:
"""
Markdown objects can be assigned to a single cell or shape via ``myrange.value`` or
``myshape.text``. They accept a string in Markdown format which will cause the text
in the cell to be formatted accordingly. They can also be used in
``mysheet.render_template()``.
.. note:: On macOS, formatting is currently not supported, but things like bullet
points will still work.
Arguments
---------
text : str
The text in Markdown syntax
style : MarkdownStyle object, optional
The MarkdownStyle object defines how the text will be formatted.
Examples
--------
>>> mysheet['A1'].value = Markdown("A text with *emphasis* and **strong** style.")
>>> myshape.text = Markdown("A text with *emphasis* and **strong** style.")
.. versionadded:: 0.23.0
"""
def __init__(self, text, style=MarkdownStyle()):
self.text = text
self.style = style
class MarkdownConverter(Converter):
@classmethod
def write_value(cls, value, options):
return render_text(value.text, value.style)
def traverse_ast_node(tree, data=None, level=0):
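    # Note (added): depth-first walk over a mistune AST subtree, flattening
    # leaf "text"/"linebreak" nodes into parallel lists together with their
    # nesting level and the chain of parent node types (e.g. ["paragraph",
    # "strong"]).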
data = (
{
"length": [],
"type": [],
"parent_type": [],
"text": [],
"parents": [],
"level": [],
}
if data is None
else data
)
for element in tree:
data["parents"] = data["parents"][:level]
if "children" in element:
data["parents"].append(element)
traverse_ast_node(element["children"], data, level=level + 1)
else:
data["level"].append(level)
data["parent_type"].append([parent["type"] for parent in data["parents"]])
data["type"].append(element["type"])
if element["type"] == "text":
data["length"].append(len(element["text"]))
data["text"].append(element["text"])
elif element["type"] == "linebreak":
data["length"].append(1)
data["text"].append("\n")
return data
def flatten_ast(value):
parse_ast = mistune.create_markdown(renderer=mistune.AstRenderer())
ast = parse_ast(value)
flat_ast = []
for node in ast:
rv = traverse_ast_node([node])
del rv["parents"]
flat_ast.append(rv)
return flat_ast
def render_text(text, style):
flat_ast = flatten_ast(text)
output = ""
for node in flat_ast:
# heading/list currently don't respect the level
if "heading" in node["parent_type"][0]:
output += "".join(node["text"])
output += "\n" + style.h1.blank_lines_after * "\n"
elif "paragraph" in node["parent_type"][0]:
output += "".join(node["text"])
output += "\n" + style.paragraph.blank_lines_after * "\n"
elif "list" in node["parent_type"][0]:
for j in node["text"]:
output += f"{style.unordered_list.bullet_character} {j}\n"
output += style.unordered_list.blank_lines_after * "\n"
return output.rstrip("\n")
def format_text(parent, text, style):
if sys.platform.startswith("darwin"):
# Characters formatting is broken because of a bug in AppleScript/Excel 2016
warnings.warn("Markdown formatting is currently ignored on macOS.")
return
flat_ast = flatten_ast(text)
position = 0
for node in flat_ast:
if "heading" in node["parent_type"][0]:
node_length = sum(node["length"]) + style.h1.blank_lines_after + 1
apply_style_to_font(
style.h1.font, parent.characters[position : position + node_length].font
)
elif "paragraph" in node["parent_type"][0]:
node_length = sum(node["length"]) + style.paragraph.blank_lines_after + 1
intra_node_position = position
for ix, j in enumerate(node["parent_type"]):
selection = slice(
intra_node_position, intra_node_position + node["length"][ix]
)
if "strong" in j:
apply_style_to_font(style.strong, parent.characters[selection].font)
elif "emphasis" in j:
apply_style_to_font(
style.emphasis, parent.characters[selection].font
)
intra_node_position += node["length"][ix]
elif "list" in node["parent_type"][0]:
node_length = sum(node["length"]) + style.unordered_list.blank_lines_after
for _ in node["text"]:
# TODO: check ast level to allow nested **strong** etc.
node_length += 3 # bullet, space and new line
else:
node_length = sum(node["length"])
position += node_length
def apply_style_to_font(style_object, font_object):
for attribute in vars(style_object):
if getattr(style_object, attribute):
setattr(font_object, attribute, getattr(style_object, attribute))
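# A minimal usage sketch (illustrative, not part of the original module); it
# assumes the relative imports above resolve, i.e. it runs inside an xlwings
# installation rather than as a standalone script.
if __name__ == "__main__":
    style = MarkdownStyle()
    style.strong.bold = False
    style.strong.color = (255, 0, 0)  # render **strong** text in red
    print(render_text("# Title\n\nSome *emphasis* text.\n\n* first\n* second", style))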
| 3,582 |
4,332 | import os
import time
import numpy as np
# from IPython import embed
available_shots = {"three": 3, "one": 1}
# available_shots = {'three':3}
for shot, shots in available_shots.items():
print("## perform experiments on {}-shot wikipara-10K ##".format(shot))
# shots = available_shots[shot]
num_of_classes = 10000
leaf_example_multiplier = 4 # 2
lr = 0.1
bits = 29 # 30
passes = 2 # 1
# hal_version = 1
# num_queries = 1 #int(np.log(shots*num_of_classes)/np.log(2.))
alpha = 0.1
learn_at_leaf = True
use_oas = False
dream_at_update = 1
dream_repeats = 5
loss = "squared"
online = False
sort_feature = True
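    # Note (added): size the memory tree so that, across all passes, each leaf
    # holds roughly leaf_example_multiplier * log2(N) of the
    # N = num_of_classes * shots training examples.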
tree_node = int(
2
* passes
* (
num_of_classes
* shots
/ (np.log(num_of_classes * shots) / np.log(2) * leaf_example_multiplier)
)
)
train_data = "paradata10000_{}_shot.vw.train".format(shot)
test_data = "paradata10000_{}_shot.vw.test".format(shot)
if os.path.exists(train_data) is not True:
os.system("wget http://kalman.ml.cmu.edu/wen_datasets/{}".format(train_data))
if os.path.exists(test_data) is not True:
os.system("wget http://kalman.ml.cmu.edu/wen_datasets/{}".format(test_data))
saved_model = "{}.vw".format(train_data)
print("## Training...")
start = time.time()
command_line = f"../../build/vowpalwabbit/vw -d {train_data} --memory_tree {tree_node} {'--learn_at_leaf' if learn_at_leaf else ''} --max_number_of_labels {num_of_classes} {'--oas' if use_oas else ''} {'--online' if online else ''} --dream_at_update {dream_at_update}\
--leaf_example_multiplier {leaf_example_multiplier} --dream_repeats {dream_repeats} {'--sort_features' if sort_feature else ''} \
--alpha {alpha} -l {lr} -b {bits} -c --passes {passes} --loss_function {loss} --holdout_off -f {saved_model}"
print(command_line)
os.system(command_line)
train_time = time.time() - start
# test:
print("## Testing...")
start = time.time()
os.system("../../build/vowpalwabbit/vw {} -i {}".format(test_data, saved_model))
test_time = time.time() - start
print("## train time {}, and test time {}".format(train_time, test_time))
| 969 |
445 | <reponame>prismake/typegql
{
"$schema": "https://schemastore.azurewebsites.net/schemas/json/tsconfig.json",
"extends": "./tsconfig.json",
"compilerOptions": {
"declaration": true,
"emitDeclarationOnly": true,
"plugins": [
{
"transform": "@zerollup/ts-transform-paths"
}
]
}
} | 146 |
711 | package com.java110.common.dao;
import com.java110.utils.exception.DAOException;
import com.java110.entity.merchant.BoMerchant;
import com.java110.entity.merchant.BoMerchantAttr;
import com.java110.entity.merchant.Merchant;
import com.java110.entity.merchant.MerchantAttr;
import java.util.List;
import java.util.Map;
/**
 * For internal use within the IOT sync error log component; it does not provide services to external systems.
 * Service interface for IOT sync error log records; everything is required to be transferred as strings, to ease migration to microservices.
 * Functions such as creating, updating, deleting and querying customers.
*
* Created by wuxw on 2016/12/27.
*/
public interface IMachineTranslateErrorServiceDao {
/**
     * Save IOT sync error log record information
     * @param info
     * @throws DAOException DAO exception
*/
void saveMachineTranslateErrorInfo(Map info) throws DAOException;
/**
     * Query IOT sync error log record information (instance process)
     * Query IOT sync error log records by bId
     * @param info bId information
     * @return IOT sync error log record information
     * @throws DAOException DAO exception
*/
List<Map> getMachineTranslateErrorInfo(Map info) throws DAOException;
/**
     * Update IOT sync error log record information
     * @param info information to update
     * @throws DAOException DAO exception
*/
void updateMachineTranslateErrorInfo(Map info) throws DAOException;
/**
     * Query the total number of IOT sync error log records
     *
     * @param info IOT sync error log record information
     * @return number of IOT sync error log records
*/
int queryMachineTranslateErrorsCount(Map info);
}
| 883 |
4,538 | <filename>hardware/chip/haas1000/drivers/services/multimedia/speech/inc/iirfilt.h
/*
* Copyright (C) 2015-2020 Alibaba Group Holding Limited
*/
#ifndef IIRFILT_H
#define IIRFILT_H
#include <stdint.h>
enum IIR_BIQUARD_TYPE
{
// pass through
IIR_BIQUARD_PASS = 0,
// raw filter
IIR_BIQUARD_RAW,
// low pass filter
IIR_BIQUARD_LPF,
// high pass filter
IIR_BIQUARD_HPF,
// band pass filter, constant skirt gain, peak gain = Q
IIR_BIQUARD_BPF0,
// band pass filter, const 0 dB peak gain
IIR_BIQUARD_BPF1,
// notch filter
IIR_BIQUARD_NOTCH,
// allpass filter
IIR_BIQUARD_APF,
// peakingEQ
IIR_BIQUARD_PEAKINGEQ,
// low shelf filter
IIR_BIQUARD_LOWSHELF,
// high shelf filter
IIR_BIQUARD_HIGHSHELF,
IIR_BIQUARD_QTY
};
struct IirBiquardState
{
float a1, a2, b0, b1, b2;
float s0, s1, s2;
};
typedef struct IirBiquardState IirBiquardState;
void iirfilt_design(IirBiquardState *st, int fs, int f0, float gain, float q, enum IIR_BIQUARD_TYPE type);
void iirfilt_raw(IirBiquardState *st, float b0, float b1, float b2, float a1, float a2);
void iirfilt_reset(IirBiquardState *st, int stages);
void iirfilt_process(IirBiquardState *st, int stages, int16_t *buf, int frame_size);
void iirfilt_process_int24(IirBiquardState *st, int stages, int32_t *buf, int frame_size);
void iirfilt_process_float(IirBiquardState *st, int stages, float *buf, int frame_size);
/* Deal with master gain in iir */
void iirfilt_process2(IirBiquardState *st, int stages, float master_gain, int16_t *buf, int frame_size);
void iirfilt_process2_int24(IirBiquardState *st, int stages, float master_gain, int32_t *buf, int frame_size);
void iirfilt_process2_float(IirBiquardState *st, int stages, float master_gain, float *buf, int frame_size);
void iirfilt_process3(IirBiquardState *st, int stages, float master_gain, int16_t *buf, int frame_size, int stride);
void iirfilt_process3_int24(IirBiquardState *st, int stages, float master_gain, int32_t *buf, int frame_size, int stride);
void iirfilt_process3_float(IirBiquardState *st, int stages, float master_gain, float *buf, int frame_size, int stride);
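/*
 * Minimal usage sketch (illustrative only, not part of this header):
 *
 *   IirBiquardState st[2];
 *   iirfilt_design(&st[0], 48000, 100, 6.0f, 0.707f, IIR_BIQUARD_LOWSHELF);
 *   iirfilt_design(&st[1], 48000, 1000, -3.0f, 1.0f, IIR_BIQUARD_PEAKINGEQ);
 *   iirfilt_reset(st, 2);            // clear the s0/s1/s2 state of both stages
 *   iirfilt_process(st, 2, buf, n);  // filter n int16 samples in place
 */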
#endif | 909 |
400 | // -*- C++ -*- Copyright (c) Microsoft Corporation; see license.txt
#pragma once
#include "LLS.h"
#if 0
{
const auto xydata = {V(0.f, 4.f), V(1.f, 4.f), V(2.f, 5.f), V(3.f, 4.f)};
using Eval = LinearRegressionPolynomialOrder<2>;
LinearRegression<2, 1, Eval> regression(xydata.num());
for (auto xy : xydata) regression.enter(ArView(xy[0]), xy[1]);
auto ar = regression.get_solution();
for (auto xy : xydata) showf("x=%g y=%g yfit=%g\n", xy[0], xy[1], dot(ar, Eval()(ArView(xy[0]))));
}
#endif
namespace hh {
template<int N> struct LinearRegressionPolynomialOrder {
static constexpr int D = 1;
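    // Note (added): evaluates the monomial basis [1, x, x^2, ..., x^(N-1)] at x = p[0].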
Vec<float,N> operator()(const Vec<float,D>& p) const {
Vec<float,N> ar;
float prod = 1.f; for_int(i, N) { ar[i] = prod; prod *= p[0]; }
return ar;
}
};
// Given m input points of dimension D,
// we want to fit a function Eval that linearly combines N terms which are functions of the input,
// each multiplied by an unknown coefficient c.
// The goal is to find c by least-squares minimization.
template<int N, int D, typename Eval = Vec<float,N>(const Vec<float,D>&)> class LinearRegression {
public:
LinearRegression(int m) : _lls(m, N, 1) { }
void enter(const Vec<float,D>& p, float val) {
assertx(_row<_lls.num_rows());
Vec<float,N> ar = _eval(p);
_lls.enter_a_r(_row, ar);
_lls.enter_b_r(_row, ArView(val));
_row++;
}
Vec<float,N> get_solution() {
assertx(_lls.solve());
Vec<float,N> ar; _lls.get_x_c(0, ar);
return ar;
}
private:
SvdDoubleLLS _lls;
Eval _eval;
int _row {0};
};
} // namespace hh
| 767 |
1,500 | """Benchmarking experiment of the ContinuousMLPQFunction."""
import tensorflow as tf
from garage import wrap_experiment
from garage.envs import GymEnv, normalize
from garage.experiment import deterministic
from garage.np.exploration_policies import AddOrnsteinUhlenbeckNoise
from garage.replay_buffer import PathBuffer
from garage.sampler import FragmentWorker, LocalSampler
from garage.tf.algos import DDPG
from garage.tf.policies import ContinuousMLPPolicy
from garage.tf.q_functions import ContinuousMLPQFunction
from garage.trainer import TFTrainer
hyper_params = {
'policy_lr': 1e-4,
'qf_lr': 1e-3,
'policy_hidden_sizes': [64, 64],
'qf_hidden_sizes': [64, 64],
'n_epochs': 300,
'steps_per_epoch': 20,
'n_exploration_steps': 100,
'n_train_steps': 50,
'discount': 0.9,
'tau': 1e-2,
'replay_buffer_size': int(1e6),
'sigma': 0.2,
}
@wrap_experiment
def continuous_mlp_q_function(ctxt, env_id, seed):
"""Create Continuous MLP QFunction on TF-DDPG.
Args:
ctxt (ExperimentContext): The experiment configuration used by
:class:`~Trainer` to create the :class:`~Snapshotter`.
env_id (str): Environment id of the task.
seed (int): Random positive integer for the trial.
"""
deterministic.set_seed(seed)
with TFTrainer(ctxt) as trainer:
env = normalize(GymEnv(env_id))
policy = ContinuousMLPPolicy(
env_spec=env.spec,
name='ContinuousMLPPolicy',
hidden_sizes=hyper_params['policy_hidden_sizes'],
hidden_nonlinearity=tf.nn.relu,
output_nonlinearity=tf.nn.tanh)
exploration_policy = AddOrnsteinUhlenbeckNoise(
env.spec, policy, sigma=hyper_params['sigma'])
qf = ContinuousMLPQFunction(
env_spec=env.spec,
hidden_sizes=hyper_params['qf_hidden_sizes'],
hidden_nonlinearity=tf.nn.relu,
name='ContinuousMLPQFunction')
replay_buffer = PathBuffer(
capacity_in_transitions=hyper_params['replay_buffer_size'])
sampler = LocalSampler(agents=exploration_policy,
envs=env,
max_episode_length=env.spec.max_episode_length,
is_tf_worker=True,
worker_class=FragmentWorker)
ddpg = DDPG(env_spec=env.spec,
policy=policy,
qf=qf,
replay_buffer=replay_buffer,
sampler=sampler,
steps_per_epoch=hyper_params['steps_per_epoch'],
policy_lr=hyper_params['policy_lr'],
qf_lr=hyper_params['qf_lr'],
target_update_tau=hyper_params['tau'],
n_train_steps=hyper_params['n_train_steps'],
discount=hyper_params['discount'],
min_buffer_size=int(1e4),
exploration_policy=exploration_policy,
policy_optimizer=tf.compat.v1.train.AdamOptimizer,
qf_optimizer=tf.compat.v1.train.AdamOptimizer)
trainer.setup(ddpg, env)
trainer.train(n_epochs=hyper_params['n_epochs'],
batch_size=hyper_params['n_exploration_steps'])
| 1,624 |
12,278 | /*
* Copyright <NAME> 2019.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* https://www.boost.org/LICENSE_1_0.txt)
*/
/*!
* \file form_auto_newline.cpp
* \author <NAME>
* \date 23.06.2019
*
* \brief This header contains tests for the auto_newline formatter.
*/
#define BOOST_TEST_MODULE form_auto_newline
#include <string>
#include <boost/test/unit_test.hpp>
#include <boost/log/expressions.hpp>
#include <boost/log/utility/formatting_ostream.hpp>
#include "char_definitions.hpp"
#include "make_record.hpp"
namespace logging = boost::log;
namespace expr = logging::expressions;
// Test appending a newline to a non-empty string
BOOST_AUTO_TEST_CASE_TEMPLATE(append_to_non_empty_string, CharT, char_types)
{
typedef CharT char_type;
typedef std::basic_ostringstream< char_type > ostream_type;
typedef logging::basic_formatting_ostream< char_type > formatting_ostream_type;
typedef typename formatting_ostream_type::string_type string_type;
typedef logging::record_view record_view;
typedef logging::basic_formatter< char_type > formatter;
record_view rec = make_record_view();
string_type str_fmt;
formatting_ostream_type strm_fmt(str_fmt);
formatter f = expr::stream << "Hello" << expr::auto_newline;
f(rec, strm_fmt);
ostream_type strm_correct;
strm_correct << "Hello\n";
BOOST_CHECK(equal_strings(strm_fmt.str(), strm_correct.str()));
}
// Test appending a newline to an empty string
BOOST_AUTO_TEST_CASE_TEMPLATE(append_to_empty_string, CharT, char_types)
{
typedef CharT char_type;
typedef std::basic_ostringstream< char_type > ostream_type;
typedef logging::basic_formatting_ostream< char_type > formatting_ostream_type;
typedef typename formatting_ostream_type::string_type string_type;
typedef logging::record_view record_view;
typedef logging::basic_formatter< char_type > formatter;
record_view rec = make_record_view();
string_type str_fmt;
formatting_ostream_type strm_fmt(str_fmt);
formatter f = expr::stream << expr::auto_newline;
f(rec, strm_fmt);
ostream_type strm_correct;
strm_correct << "\n";
BOOST_CHECK(equal_strings(strm_fmt.str(), strm_correct.str()));
}
// Test not appending a newline to a non-empty string which already ends with a newline
BOOST_AUTO_TEST_CASE_TEMPLATE(not_append_if_ends_with_a_newline, CharT, char_types)
{
typedef CharT char_type;
typedef std::basic_ostringstream< char_type > ostream_type;
typedef logging::basic_formatting_ostream< char_type > formatting_ostream_type;
typedef typename formatting_ostream_type::string_type string_type;
typedef logging::record_view record_view;
typedef logging::basic_formatter< char_type > formatter;
record_view rec = make_record_view();
string_type str_fmt;
formatting_ostream_type strm_fmt(str_fmt);
formatter f = expr::stream << "Hello\n" << expr::auto_newline;
f(rec, strm_fmt);
ostream_type strm_correct;
strm_correct << "Hello\n";
BOOST_CHECK(equal_strings(strm_fmt.str(), strm_correct.str()));
}
| 1,217 |
359 | //
// Copyright 2021 The Matrix.org Foundation C.I.C
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#import "MXLogObjcWrapper.h"
#define MXLogVerbose(message, ...) { \
[MXLogObjcWrapper logVerbose:[NSString stringWithFormat: message, ##__VA_ARGS__] file:@__FILE__ function:[NSString stringWithFormat:@"%s", __FUNCTION__] line:__LINE__]; \
}
#define MXLogDebug(message, ...) { \
[MXLogObjcWrapper logDebug:[NSString stringWithFormat: message, ##__VA_ARGS__] file:@__FILE__ function:[NSString stringWithFormat:@"%s", __FUNCTION__] line:__LINE__]; \
}
#define MXLogInfo(message, ...) { \
[MXLogObjcWrapper logInfo:[NSString stringWithFormat: message, ##__VA_ARGS__] file:@__FILE__ function:[NSString stringWithFormat:@"%s", __FUNCTION__] line:__LINE__]; \
}
#define MXLogWarning(message, ...) { \
[MXLogObjcWrapper logWarning:[NSString stringWithFormat: message, ##__VA_ARGS__] file:@__FILE__ function:[NSString stringWithFormat:@"%s", __FUNCTION__] line:__LINE__]; \
}
#define MXLogError(message, ...) { \
[MXLogObjcWrapper logError:[NSString stringWithFormat: message, ##__VA_ARGS__] file:@__FILE__ function:[NSString stringWithFormat:@"%s", __FUNCTION__] line:__LINE__]; \
}
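// Example usage (illustrative, added):
// MXLogDebug(@"Loaded %lu rooms", (unsigned long)rooms.count);
// MXLogError(@"Sync failed: %@", error);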
| 564 |
2,329 | <filename>shenyu-admin/src/main/java/org/apache/shenyu/admin/service/impl/PluginHandleServiceImpl.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shenyu.admin.service.impl;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.shenyu.admin.aspect.annotation.Pageable;
import org.apache.shenyu.admin.mapper.PluginHandleMapper;
import org.apache.shenyu.admin.mapper.ShenyuDictMapper;
import org.apache.shenyu.admin.model.dto.PluginHandleDTO;
import org.apache.shenyu.admin.model.entity.BaseDO;
import org.apache.shenyu.admin.model.entity.PluginHandleDO;
import org.apache.shenyu.admin.model.event.plugin.BatchPluginDeletedEvent;
import org.apache.shenyu.admin.model.page.CommonPager;
import org.apache.shenyu.admin.model.page.PageResultUtils;
import org.apache.shenyu.admin.model.query.PluginHandleQuery;
import org.apache.shenyu.admin.model.vo.PluginHandleVO;
import org.apache.shenyu.admin.model.vo.ShenyuDictVO;
import org.apache.shenyu.admin.service.PluginHandleService;
import org.apache.shenyu.admin.service.publish.PluginHandleEventPublisher;
import org.apache.shenyu.admin.utils.ListUtil;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
/**
* Implementation of the {@link org.apache.shenyu.admin.service.PluginHandleService}.
*/
@Service
public class PluginHandleServiceImpl implements PluginHandleService {
private static final int SELECT_BOX_DATA_TYPE = 3;
private final PluginHandleMapper pluginHandleMapper;
private final ShenyuDictMapper shenyuDictMapper;
private final PluginHandleEventPublisher eventPublisher;
public PluginHandleServiceImpl(final PluginHandleMapper pluginHandleMapper,
final ShenyuDictMapper shenyuDictMapper,
final PluginHandleEventPublisher eventPublisher) {
this.pluginHandleMapper = pluginHandleMapper;
this.shenyuDictMapper = shenyuDictMapper;
this.eventPublisher = eventPublisher;
}
@Override
@Pageable
public CommonPager<PluginHandleVO> listByPage(final PluginHandleQuery pluginHandleQuery) {
List<PluginHandleDO> pluginHandleDOList = pluginHandleMapper.selectByQuery(pluginHandleQuery);
return PageResultUtils.result(pluginHandleQuery.getPageParameter(), () -> this.buildPluginHandleVO(pluginHandleDOList));
}
@Override
public Integer create(final PluginHandleDTO pluginHandleDTO) {
PluginHandleDO pluginHandleDO = PluginHandleDO.buildPluginHandleDO(pluginHandleDTO);
int pluginHandleCount = pluginHandleMapper.insertSelective(pluginHandleDO);
if (pluginHandleCount > 0) {
eventPublisher.onCreated(pluginHandleDO);
}
return pluginHandleCount;
}
@Override
public Integer update(final PluginHandleDTO pluginHandleDTO) {
PluginHandleDO pluginHandleDO = PluginHandleDO.buildPluginHandleDO(pluginHandleDTO);
final PluginHandleDO before = pluginHandleMapper.selectById(pluginHandleDTO.getId());
int pluginHandleCount = pluginHandleMapper.updateByPrimaryKeySelective(pluginHandleDO);
if (pluginHandleCount > 0) {
eventPublisher.onUpdated(pluginHandleDO, before);
}
return pluginHandleCount;
}
@Override
public Integer deletePluginHandles(final List<String> ids) {
if (CollectionUtils.isEmpty(ids)) {
return 0;
}
final List<PluginHandleDO> handles = pluginHandleMapper.selectByIdList(ids);
final int count = pluginHandleMapper.deleteByIdList(ids);
if (count > 0) {
eventPublisher.onDeleted(handles);
}
return count;
}
@Override
public PluginHandleVO findById(final String id) {
return buildPluginHandleVO(pluginHandleMapper.selectById(id));
}
@Override
public List<PluginHandleVO> list(final String pluginId, final Integer type) {
List<PluginHandleDO> pluginHandleDOList = pluginHandleMapper.selectByQuery(PluginHandleQuery.builder()
.pluginId(pluginId)
.type(type)
.build());
return buildPluginHandleVO(pluginHandleDOList);
}
/**
* The associated Handle needs to be deleted synchronously.
*
* @param event event
*/
@EventListener(value = BatchPluginDeletedEvent.class)
public void onPluginDeleted(final BatchPluginDeletedEvent event) {
deletePluginHandles(ListUtil.map(pluginHandleMapper.selectByPluginIdList(event.getDeletedPluginIds()), BaseDO::getId));
}
private PluginHandleVO buildPluginHandleVO(final PluginHandleDO pluginHandleDO) {
List<ShenyuDictVO> dictOptions = null;
if (pluginHandleDO.getDataType() == SELECT_BOX_DATA_TYPE) {
dictOptions = ListUtil.map(shenyuDictMapper.findByType(pluginHandleDO.getField()), ShenyuDictVO::buildShenyuDictVO);
}
return PluginHandleVO.buildPluginHandleVO(pluginHandleDO, dictOptions);
}
private List<PluginHandleVO> buildPluginHandleVO(final List<PluginHandleDO> pluginHandleDOList) {
List<String> fieldList = pluginHandleDOList.stream()
.filter(pluginHandleDO -> pluginHandleDO.getDataType() == SELECT_BOX_DATA_TYPE)
.map(PluginHandleDO::getField)
.distinct()
.collect(Collectors.toList());
Map<String, List<ShenyuDictVO>> shenyuDictMap = CollectionUtils.isNotEmpty(fieldList)
? Optional.ofNullable(shenyuDictMapper.findByTypeBatch(fieldList))
.orElseGet(ArrayList::new)
.stream()
.map(ShenyuDictVO::buildShenyuDictVO)
.collect(Collectors.groupingBy(ShenyuDictVO::getType))
: new HashMap<>(0);
return pluginHandleDOList.stream()
.map(pluginHandleDO -> {
List<ShenyuDictVO> dictOptions = shenyuDictMap.get(pluginHandleDO.getField());
return PluginHandleVO.buildPluginHandleVO(pluginHandleDO, dictOptions);
})
.collect(Collectors.toList());
}
}
| 2,730 |
335 | <reponame>Safal08/Hacktoberfest-1
{
"word": "Tan",
"definitions": [
"(of a pale-skinned person or their skin) become brown or browner after exposure to the sun.",
"(of the sun) cause (a pale-skinned person or their skin) to become brown or browner.",
"Convert (animal skin) into leather by soaking in a liquid containing tannic acid, or by the use of other chemicals.",
"Beat (someone) repeatedly as a punishment."
],
"parts-of-speech": "Verb"
} | 174 |
1,338 | <reponame>Kirishikesan/haiku
#ifndef StringSearchTest_H
#define StringSearchTest_H
#include "TestCase.h"
#include <String.h>
class StringSearchTest : public BTestCase
{
private:
protected:
public:
static Test *suite(void);
void PerformTest(void);
StringSearchTest(std::string name = "");
virtual ~StringSearchTest();
};
#endif
| 126 |
335 | {
"word": "Milk",
"definitions": [
"An opaque white fluid rich in fat and protein, secreted by female mammals for the nourishment of their young.",
"The milk from cows (or goats or sheep) as consumed by humans.",
"The white juice of certain plants.",
"A creamy-textured liquid with a particular ingredient or use."
],
"parts-of-speech": "Noun"
} | 137 |
2,441 | <reponame>islandev/tablesaw
package tech.tablesaw.filtering;
import com.google.common.annotations.Beta;
import java.util.function.Function;
import tech.tablesaw.api.Table;
import tech.tablesaw.selection.Selection;
@Beta
public class Not implements Function<Table, Selection> {
private Function<Table, Selection> argument;
public Not(Function<Table, Selection> argument) {
this.argument = argument;
}
@Override
public Selection apply(Table table) {
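    // Note (added): flipping the selection over [0, rowCount) inverts the
    // wrapped filter's matches, implementing logical NOT.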
return argument.apply(table).flip(0, table.rowCount());
}
}
| 167 |
330 | <filename>src/method_table_symbol.h
/* Auto generated by make_method_table.rb */
#include "symbol_builtin.h"
struct RClass *mrbc_init_class_symbol(struct VM *vm)
{
static const mrbc_sym method_symbols[] = {
MRBC_SYMID_all_symbols,
#if MRBC_USE_STRING
MRBC_SYMID_id2name,
#endif
#if MRBC_USE_STRING
MRBC_SYMID_inspect,
#endif
#if MRBC_USE_STRING
MRBC_SYMID_to_s,
#endif
MRBC_SYMID_to_sym,
};
static const mrbc_func_t method_functions[] = {
c_all_symbols,
#if MRBC_USE_STRING
c_to_s,
#endif
#if MRBC_USE_STRING
c_inspect,
#endif
#if MRBC_USE_STRING
c_to_s,
#endif
c_ineffect,
};
return mrbc_define_builtin_class("Symbol", mrbc_class_object, method_symbols, method_functions, sizeof(method_symbols)/sizeof(mrbc_sym) );
}
| 361 |
304 | """Groupby aggregation benchmark."""
import time
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
import pandas as pd
import cudf
from xfeat.helper import aggregation
def main():
key_nunique = 5_000
n_samples = 10_000_000
records = []
for i in range(1, 8):
df = pd.DataFrame({
"group_value": np.random.randint(-1, 1, n_samples * i),
"group_key": np.random.randint(0, key_nunique, n_samples * i),
})
df_cudf = cudf.from_pandas(df)
# Pandas version
time_records = []
for _ in range(5):
t = time.process_time()
aggregation(df, "group_key", ["group_value"], ["mean"])
elapsed_time = time.process_time() - t
time_records.append(elapsed_time)
records.append({
"n_samples": n_samples * i,
"n_unique_keys": key_nunique,
"process_time_mean": np.mean(time_records),
"process_time_std": np.std(time_records),
"method": "CPU-pandas",
})
# cuDF version
time_records = []
for _ in range(5):
t = time.process_time()
aggregation(df_cudf, "group_key", ["group_value"], ["mean"])
elapsed_time = time.process_time() - t
time_records.append(elapsed_time)
records.append({
"n_samples": n_samples * i,
"n_unique_keys": key_nunique,
"process_time_mean": np.mean(time_records),
"process_time_std": np.std(time_records),
"method": "GPU-cuDF",
})
pd.DataFrame(records).to_csv("./benchmark_feature_aggregation.csv", index=False)
def plot():
df = pd.read_csv("benchmark_feature_aggregation.csv")
df["n_samples"] = df["n_samples"] / 1000.0 / 1000.0 / 10.0
sns.set(style="whitegrid")
fig, ax = plt.subplots(figsize=(12, 6))
plt.title("Benchmark groupby aggregation (xfeat.helper.aggregate)",
fontsize=24, pad=24)
ax = sns.barplot(
x="n_samples",
y="process_time_mean",
hue="method",
data=df,
ax=ax,
palette=["#c7c7c7", "#ce76de"],
)
ax.set_ylabel("process time [sec, log]", fontsize=24)
ax.set_xlabel("num samples [*1e7]", fontsize=24)
ax.set_yscale("log")
plt.setp(ax.get_xticklabels(), fontsize=18)
plt.setp(ax.get_yticklabels(), fontsize=18)
ax.legend(loc=0, fontsize=20)
fig.autofmt_xdate(rotation=20)
fig.patch.set_facecolor("white")
plt.tight_layout()
sns.despine(left=True, bottom=True)
plt.savefig("./_docs/benchmark_groupby_aggregation.png")
if __name__ == '__main__':
main()
plot()
| 1,332 |
778 | <gh_stars>100-1000
import KratosMultiphysics as Kratos
import KratosMultiphysics.mpi as MPI #TODO: do not import the so directly (but I need a nice Python module first)
import KratosMultiphysics.KratosUnittest as UnitTest
class TestMPICommunicatorSetUp(UnitTest.TestCase):
def setUp(self):
self.model = Kratos.Model()
def testMPICommunicatorSetUp(self):
model_part = self.model.CreateModelPart("Test_model_part",1)
# I'd do more complex tests, but this one should work in serial too (JC)
self.assertNotRegex(model_part.GetCommunicator().__str__(), "MPICommunicator")
MPI.ModelPartCommunicatorUtilities.SetMPICommunicator(model_part)
self.assertRegex(model_part.GetCommunicator().__str__(), "MPICommunicator")
if __name__ == "__main__":
UnitTest.main()
| 309 |
990 | <reponame>SeirousLee/example-code-2e
from tree import tree
def test_1_level():
class One: pass
expected = [('One', 0)]
result = list(tree(One))
assert expected == result
def test_2_levels_2_leaves():
class Branch: pass
class Leaf1(Branch): pass
class Leaf2(Branch): pass
expected = [
('Branch', 0),
('Leaf1', 1),
('Leaf2', 1),
]
result = list(tree(Branch))
assert expected == result
def test_3_levels_1_leaf():
class X: pass
class Y(X): pass
class Z(Y): pass
expected = [
('X', 0),
('Y', 1),
('Z', 2),
]
result = list(tree(X))
assert expected == result
| 326 |
1,056 | <filename>java/java.project/src/org/netbeans/spi/java/project/support/JavadocAndSourceRootDetection.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.spi.java.project.support;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.netbeans.api.annotations.common.CheckForNull;
import org.netbeans.api.annotations.common.NonNull;
import org.netbeans.api.annotations.common.NullAllowed;
import org.netbeans.api.annotations.common.SuppressWarnings;
import org.netbeans.api.queries.VisibilityQuery;
import org.netbeans.modules.classfile.ClassFile;
import org.netbeans.modules.classfile.ClassName;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
import org.openide.util.Parameters;
/**
* Miscellaneous helper utils to detect Javadoc root folder, source root folder or
* package of the given java or class file.
*
* @since org.netbeans.modules.java.project/1 1.20
*/
public class JavadocAndSourceRootDetection {
private static final int JAVADOC_TRAVERSE_DEEPTH = 7;
private static final int SRC_TRAVERSE_DEEPTH = 50;
private static final Logger LOG = Logger.getLogger(JavadocAndSourceRootDetection.class.getName());
private JavadocAndSourceRootDetection() {
}
/**
* Finds Javadoc root inside of given folder.
*
     * @param fo base folder to start search in; the routine will traverse 7 folders
     *            deep before giving up; cannot be null; must be a folder
* @return found Javadoc root or null if none found
*/
public static FileObject findJavadocRoot(FileObject baseFolder) {
Parameters.notNull("baseFolder", baseFolder);
if (!baseFolder.isFolder()) {
throw new IllegalArgumentException("baseFolder must be folder: "+baseFolder); // NOI18N
}
final Set<FileObject> result = new HashSet<>();
findAllJavadocRoots(
baseFolder,
result,
null,
true,
0);
assert (result.size() & 0xFFFFFFFE) == 0;
final Iterator<FileObject> it = result.iterator();
return it.hasNext() ?
it.next():
null;
}
/**
* Finds all javadoc roots under the given base folder.
     * @param baseFolder the base folder to start search in; the routine will traverse 7 folders deep
* @param canceled the canceling support
* @return the found javadoc roots
* @since 1.56
*/
@NonNull
public static Set<? extends FileObject> findJavadocRoots(
@NonNull final FileObject baseFolder,
@NullAllowed final AtomicBoolean canceled) {
Parameters.notNull("folder", baseFolder); //NOI18N
if (!baseFolder.isFolder()) {
throw new IllegalArgumentException ("baseFolder must be folder: " + baseFolder); //NOI18N
}
final Set<FileObject> result = new TreeSet<>((f1,f2) -> {
final String f1p = FileUtil.getRelativePath(baseFolder, f1);
final String f2p = FileUtil.getRelativePath(baseFolder, f2);
return f1p.compareTo(f2p);
});
findAllJavadocRoots(
baseFolder,
result,
canceled,
false,
0);
return Collections.unmodifiableSet(result);
}
/**
* Finds Java sources root inside of given folder.
*
* @param fo base folder to start search in; routine will traverse subfolders
* to find a Java file to detect package root; cannot be null; must be folder
* @return found package root of first Java file found or null if none found
*/
public static FileObject findSourceRoot(FileObject fo) {
Parameters.notNull("fo", fo);
if (!fo.isFolder()) {
throw new IllegalArgumentException("fo must be folder - "+fo); // NOI18N
}
FileObject root = findJavaSourceFile(fo, 0);
if (root != null) {
return findPackageRoot(root);
}
return null;
}
/**
* Finds Java sources roots inside of given folder.
*
* @param folder to start search in; routine will traverse subfolders
* to find a Java file to detect package root; cannot be null; must be folder
* @param canceled if set to true the method immediately returns roots it has already found,
* may be null
* @return {@link Collection} of found package roots
* @since 1.31
*/
public static Set<? extends FileObject> findSourceRoots(final @NonNull FileObject folder, final @NullAllowed AtomicBoolean canceled) {
Parameters.notNull("folder", folder); //NOI18N
if (!folder.isValid()) {
throw new IllegalArgumentException("Folder: " + FileUtil.getFileDisplayName(folder)+" is not valid."); //NOI18N
}
if (!folder.isFolder()) {
throw new IllegalArgumentException("The parameter: " + FileUtil.getFileDisplayName(folder) + " has to be a directory."); //NOI18N
}
final Set<FileObject> result = new HashSet<FileObject>();
findAllSourceRoots(folder, result, canceled, 0);
return Collections.unmodifiableSet(result);
}
/**
* Returns package root of the given java or class file.
*
* @param fo either .java or .class file; never null
* @return package root of the given file or null if none found
*/
public static FileObject findPackageRoot(final FileObject fo) {
if ("java".equals(fo.getExt())) { // NOI18N
return findJavaPackage (fo);
} else if ("class".equals(fo.getExt())) { // NOI18N
return findClassPackage (fo);
} else {
throw new IllegalArgumentException("only java or class files accepted "+fo); // NOI18N
}
}
private static FileObject findAllSourceRoots(final FileObject folder, final Collection<? super FileObject> result,
final AtomicBoolean canceled, final int depth) {
if (depth == SRC_TRAVERSE_DEEPTH) {
return null;
}
if (!VisibilityQuery.getDefault().isVisible(folder)) {
return null;
}
if (isRecursiveSymLink(folder)) {
return null;
}
final FileObject[] children = folder.getChildren();
for (FileObject child : children) {
if (canceled != null && canceled.get()) {
return null;
} else if (child.isData() && "text/x-java".equals(FileUtil.getMIMEType(child, "text/x-java"))) { //NOI18N
final FileObject root = findPackageRoot(child);
if (root != null) {
result.add(root);
}
return root;
} else if (child.isFolder()) {
final FileObject upTo = findAllSourceRoots(child, result, canceled, depth+1);
if (upTo != null && !upTo.equals(child)) {
return upTo;
}
}
}
return null;
}
private static boolean isRecursiveSymLink(@NonNull final FileObject folder) {
try {
return FileUtil.isRecursiveSymbolicLink(folder);
} catch (IOException ioe) {
LOG.log(
Level.WARNING,
"Cannot read link: {0}, reason: {1}", //NOI18N
new Object[]{
FileUtil.getFileDisplayName(folder),
ioe.getMessage()
});
return true; //prevent O(a^n) growth
}
}
private static boolean findAllJavadocRoots(
@NonNull final FileObject folder,
@NonNull final Collection<? super FileObject> result,
@NullAllowed final AtomicBoolean cancel,
final boolean singleRoot,
final int depth) {
final FileObject pkgList = folder.getFileObject("package-list", null); // NOI18N
if (pkgList != null) {
result.add(folder);
return singleRoot;
}
if (depth == JAVADOC_TRAVERSE_DEEPTH) {
return false;
}
if (cancel != null && cancel.get()) {
return true;
}
for (FileObject file : folder.getChildren()) {
if (!file.isFolder()) {
continue;
}
if (findAllJavadocRoots(file, result, cancel, singleRoot, depth+1)) {
return true;
}
}
return false;
}
private static FileObject findJavaSourceFile(FileObject fo, int level) {
if (level == SRC_TRAVERSE_DEEPTH) {
return null;
}
if (!VisibilityQuery.getDefault().isVisible(fo)) {
return null;
}
if (isRecursiveSymLink(fo)) {
return null;
}
// go through files first:
for (FileObject fo2 : fo.getChildren()) {
if (fo2.isData() && "java".equals(fo2.getExt())) { // NOI18N
return fo2;
}
}
        // now check subfolders:
for (FileObject fo2 : fo.getChildren()) {
if (fo2.isFolder()) {
fo2 = findJavaSourceFile(fo2, level+1);
if (fo2 != null) {
return fo2;
}
}
}
return null;
}
static final Pattern JAVA_FILE, PACKAGE_INFO;
static {
String whitespace = "(?:(?://[^\n]*\n)|(?:/\\*.*?\\*/)|\\s)"; //NOI18N
String javaIdentifier = "(?:\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)"; //NOI18N
String packageStatement = "package" + whitespace + "+(" + javaIdentifier + "(?:\\." + javaIdentifier + ")*)" + whitespace + "*;"; //NOI18N
JAVA_FILE = Pattern.compile("(?ms)" + whitespace + "*" + packageStatement + ".*", Pattern.MULTILINE | Pattern.DOTALL); //NOI18N
// XXX this does not take into account annotations and imports:
PACKAGE_INFO = Pattern.compile("(?ms)(?:.*" + whitespace + ")?" + packageStatement + whitespace + "*", Pattern.MULTILINE | Pattern.DOTALL); //NOI18N
}
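    // Illustrative example (added): for a source file beginning with
    // "package com.example.app;", JAVA_FILE matches and group(1) yields
    // "com.example.app"; PACKAGE_INFO additionally allows preceding content
    // (such as annotations in package-info.java) before the package statement.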
@SuppressWarnings({"OS_OPEN_STREAM", "RR_NOT_CHECKED"})
private static FileObject findJavaPackage(FileObject fo) {
try {
InputStream is = fo.getInputStream();
try {
// Try default encoding, probably good enough.
Reader r = new BufferedReader(new InputStreamReader(is));
r.mark(2);
char[] cbuf = new char[2];
r.read(cbuf, 0, 2);
if (cbuf[0] == 255 && cbuf[1] == 254) { // BOM
is.close();
is = fo.getInputStream();
r = new BufferedReader(new InputStreamReader(is, "Unicode")); //NOI18N
} else {
r.reset();
}
// TODO: perhaps limit and read just first 100kB and not whole file:
StringBuilder b = new StringBuilder((int) fo.getSize());
int read;
char[] buf = new char[b.length() + 1];
while ((read = r.read(buf)) != -1) {
b.append(buf, 0, read);
}
Matcher m = (fo.getNameExt().equals("package-info.java") ? PACKAGE_INFO : JAVA_FILE).matcher(b); //NOI18N
if (m.matches()) {
String pkg = m.group(1);
LOG.log(Level.FINE, "Found package declaration {0} in {1}", new Object[] {pkg, fo}); //NOI18N
return getPackageRoot(fo, pkg);
} else {
// XXX probably not a good idea to infer the default package: return f.getParentFile();
return null;
}
} finally {
is.close();
}
} catch (IOException x) {
LOG.log(
Level.INFO,
"Cannot read: {0}", //NOI18N
FileUtil.getFileDisplayName(fo));
return null;
}
}
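    /**
     * Walk upwards from the file's parent folder, matching each package segment
     * in reverse; e.g. (illustrative) a file {@code .../src/com/example/App.java}
     * declaring package {@code com.example} resolves to {@code .../src}.
     */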
@CheckForNull
private static FileObject getPackageRoot(@NonNull final FileObject javaOrClassFile, @NonNull final String packageName) {
final String[] path = packageName.split("\\."); //NOI18N
FileObject pkg = javaOrClassFile.getParent();
for (int i=path.length-1; i>=0; i--) {
if (!path[i].equals(pkg.getName())) {
return null;
}
pkg = pkg.getParent();
}
return pkg;
}
/**
* Find java package in side .class file.
*
* @return package or null if not found
*/
private static FileObject findClassPackage(FileObject file) {
try {
InputStream in = file.getInputStream();
try {
ClassFile cf = new ClassFile(in,false);
ClassName cn = cf.getName();
return getPackageRoot(file, cn.getPackage());
} finally {
in.close ();
}
} catch (IOException e) {
LOG.log(
Level.INFO,
"Cannot read: {0}", //NOI18N
FileUtil.getFileDisplayName(file));
}
return null;
}
}
| 6,286 |
1,444 | package mage.cards.a;
import java.util.UUID;
import mage.constants.SubType;
import mage.abilities.Ability;
import mage.abilities.common.SimpleStaticAbility;
import mage.abilities.common.delayed.AtTheBeginOfNextEndStepDelayedTriggeredAbility;
import mage.abilities.condition.common.DidNotAttackThisTurnEnchantedCondition;
import mage.abilities.decorator.ConditionalTriggeredAbility;
import mage.abilities.effects.common.AttachEffect;
import mage.abilities.effects.common.DestroyAttachedToEffect;
import mage.abilities.effects.common.continuous.GainAbilityAttachedEffect;
import mage.constants.Outcome;
import mage.target.TargetPermanent;
import mage.abilities.keyword.EnchantAbility;
import mage.abilities.keyword.FirstStrikeAbility;
import mage.abilities.keyword.TrampleAbility;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.AttachmentType;
import mage.constants.CardType;
import mage.constants.TargetController;
import mage.constants.Zone;
import mage.filter.common.FilterCreaturePermanent;
import mage.filter.predicate.Predicates;
import mage.watchers.common.AttackedThisTurnWatcher;
/**
*
* @author jeffwadsworth
*/
public final class Aggression extends CardImpl {
private static final FilterCreaturePermanent filter = new FilterCreaturePermanent();
static {
filter.add(Predicates.not(SubType.WALL.getPredicate()));
}
public Aggression(UUID ownerId, CardSetInfo setInfo) {
super(ownerId, setInfo, new CardType[]{CardType.ENCHANTMENT}, "{2}{R}");
this.subtype.add(SubType.AURA);
// Enchant non-Wall creature
TargetPermanent auraTarget = new TargetPermanent(filter);
this.getSpellAbility().addTarget(auraTarget);
this.getSpellAbility().addEffect(new AttachEffect(Outcome.BoostCreature));
Ability ability = new EnchantAbility(auraTarget.getTargetName());
this.addAbility(ability);
// Enchanted creature has first strike and trample.
Ability ability2 = new SimpleStaticAbility(
Zone.BATTLEFIELD,
new GainAbilityAttachedEffect(
FirstStrikeAbility.getInstance(),
AttachmentType.AURA));
ability2.addEffect(new GainAbilityAttachedEffect(
TrampleAbility.getInstance(),
AttachmentType.AURA));
this.addAbility(ability2);
// At the beginning of the end step of enchanted creature's controller, destroy that creature if it didn't attack this turn.
this.addAbility(new ConditionalTriggeredAbility(
new AtTheBeginOfNextEndStepDelayedTriggeredAbility(
new DestroyAttachedToEffect("enchanted"),
TargetController.CONTROLLER_ATTACHED_TO),
DidNotAttackThisTurnEnchantedCondition.instance,
"At the beginning of the end step of enchanted creature's controller, destroy that creature if it didn't attack this turn."),
new AttackedThisTurnWatcher());
}
private Aggression(final Aggression card) {
super(card);
}
@Override
public Aggression copy() {
return new Aggression(this);
}
}
| 1,152 |
5,169 | <gh_stars>1000+
{
"name": "MediaLibrary",
"version": "0.0.3",
"summary": "图片、视频选择器",
"description": "查看了Apple官方Demo后,高仿微信的图片、视频选择器",
"homepage": "https://github.com/SwiftPartner",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"ryan": "<EMAIL>"
},
"social_media_url": "https://www.jianshu.com/u/ddf4eb832e80",
"platforms": {
"ios": "9.0"
},
"swift_version": "4.2",
"source": {
"git": "https://github.com/SwiftPartner/MediaLibrary.git",
"tag": "0.0.3"
},
"source_files": "Classes/**/*.{h,m,swift}",
"exclude_files": "Classes/Exclude",
"resources": "Resources/**/*"
}
| 341 |
549 | <gh_stars>100-1000
package net.notejam.spring.security.owner;
import static org.mockito.Mockito.when;
import java.util.Optional;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.security.access.AccessDeniedException;
import net.notejam.spring.pad.Pad;
import net.notejam.spring.user.User;
import net.notejam.spring.user.UserService;
/**
* A test for PermitOwnerAspect
*
* @author <EMAIL>
* @see <a href="bitcoin:1335STSwu9hST4vcMRppEPgENMHD2r1REK">Donations</a>
*/
@RunWith(MockitoJUnitRunner.class)
public class PermitOwnerAspectTest {
@Mock
private UserService userService;
/**
* The SUT
*/
private PermitOwnerAspect aspect;
@Before
public void setup() {
aspect = new PermitOwnerAspect();
aspect.setUserService(userService);
}
/**
* Tests authorizeReturn() permits Null
*/
@Test
public void testAuthorizeReturnShouldPermitNull() {
aspect.authorizeReturn(null);
aspect.authorizeReturn(Optional.ofNullable(null));
}
/**
* Tests authorizeReturn() denies anonymous access.
*/
@Test(expected = AccessDeniedException.class)
public void testAuthorizeReturnDeniesAnonymous() {
Pad owned = new Pad();
owned.setUser(new User());
when(userService.getAuthenticatedUser()).thenReturn(null);
aspect.authorizeReturn(owned);
}
/**
* Tests authorizeReturn() denies access to other users.
*/
@Test(expected = AccessDeniedException.class)
public void testAuthorizeReturnDeniesOtherUser() {
Pad owned = new Pad();
owned.setUser(new User());
when(userService.getAuthenticatedUser()).thenReturn(new User());
aspect.authorizeReturn(owned);
}
/**
* Tests authorizeReturn() permits access to the owner.
*/
@Test
public void testAuthorizeReturnPermitsOwner() {
User owner = new User();
Pad owned = new Pad();
owned.setUser(owner);
when(userService.getAuthenticatedUser()).thenReturn(owner);
aspect.authorizeReturn(owned);
}
}
| 846 |
5,169 | {
"name": "TestAlert",
"version": "0.0.1",
"summary": "A short demo description of TestAlert.",
"description": "This is Sally's testAlert demo. Welcome here.",
"homepage": "https://git.silvrr.com/Sallly/TestAlert",
"license": {
"type": "MIT",
"file": "FILE_LICENSE"
},
"authors": {
"Sally": "<EMAIL>"
},
"platforms": {
"ios": "9.0"
},
"source": {
"git": "<EMAIL>:Sallly/TestAlert.git",
"tag": "0.0.1"
},
"source_files": [
"TestAlert",
"TestAlert/Example/**/*"
],
"frameworks": [
"Foundation",
"UIKit"
],
"dependencies": {
"Masonry": [
]
}
}
| 278 |
526 | /* SPDX-License-Identifier: Apache 2.0 */
/* Copyright Contributors to the ODPi Egeria project. */
package org.odpi.openmetadata.adapters.connectors.governanceactions.remediation;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.odpi.openmetadata.adapters.connectors.governanceactions.ffdc.GovernanceActionConnectorsErrorCode;
import org.odpi.openmetadata.frameworks.connectors.ffdc.*;
import org.odpi.openmetadata.frameworks.governanceaction.OpenMetadataStore;
import org.odpi.openmetadata.frameworks.governanceaction.RemediationGovernanceActionService;
import org.odpi.openmetadata.frameworks.governanceaction.properties.*;
import org.odpi.openmetadata.frameworks.governanceaction.search.ElementProperties;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* OriginSeekerGovernanceActionConnector uses the lineage mapping relationships to determine the origin of the asset that is passed
* as an action target. It follows each lineage path, collecting the AssetOrigin classifications. If there is only one, it is
* added to the action target asset and the output guard is set to origin-assigned. If there are multiple origin's identified they are
* added to the detectedOrigins request parameter and the output guard is multiple-origins-detected.
* If no AssetOrigin classifications are found, the output guard is no-origins-detected.
*
 * Note: This implementation currently only follows LineageMapping links between assets; it needs extending to support lineage mapping
* between ports and schema attributes.
*/
public class OriginSeekerGovernanceActionConnector extends RemediationGovernanceActionService
{
private static final String assetOriginClassification = "AssetOrigin";
private static final String detectedOriginsProperty = "detectedOrigins";
/**
* Indicates that the governance action service is completely configured and can begin processing.
*
* This is a standard method from the Open Connector Framework (OCF) so
* be sure to call super.start() at the start of your overriding version.
*
* @throws ConnectorCheckedException there is a problem within the governance action service.
*/
@Override
public void start() throws ConnectorCheckedException
{
final String methodName = "start";
super.start();
List<String> outputGuards = new ArrayList<>();
CompletionStatus completionStatus = null;
Map<String, String> newRequestParameters = null;
try
{
if (governanceContext.getActionTargetElements() == null)
{
completionStatus = CompletionStatus.FAILED;
outputGuards.add(OriginSeekerGovernanceActionProvider.NO_TARGETS_DETECTED_GUARD);
}
else if (governanceContext.getActionTargetElements().size() == 1)
{
ActionTargetElement actionTarget = governanceContext.getActionTargetElements().get(0);
OpenMetadataElement targetElement = actionTarget.getTargetElement();
/*
                 * Check that the AssetOrigin classification is not already set; if it is, the origin is already assigned and no further work is needed.
*/
ElementClassification existingAssetOriginClassification = this.getAssetOriginClassification(targetElement);
if (existingAssetOriginClassification != null)
{
completionStatus = CompletionStatus.ACTIONED;
outputGuards.add(OriginSeekerGovernanceActionProvider.ORIGIN_ALREADY_ASSIGNED_GUARD);
}
if (completionStatus == null)
{
/*
* No current AssetOrigin classification is present so ok to begin seeking the origin through the lineage.
* This method returns a list of origin classifications detected from walking the lineage tree.
* The returned list has been deduplicated.
*/
List<String> coveredEntityGUIDs = new ArrayList<>();
coveredEntityGUIDs.add(targetElement.getElementGUID());
List<ElementProperties> originClassifications = this.getOrigins(targetElement, coveredEntityGUIDs);
if (originClassifications == null)
{
/*
* No origin classifications have been detected which means the guard needs to be set so that a manual assignment
* can be initiated.
*/
outputGuards.add(OriginSeekerGovernanceActionProvider.NO_ORIGINS_DETECTED_GUARD);
completionStatus = CompletionStatus.INVALID;
}
else if (originClassifications.size() == 1)
{
/*
* A single origin has been found so it is ok to add it to the action target asset.
*/
governanceContext.classifyMetadataElement(targetElement.getElementGUID(),
assetOriginClassification,
false,
false,
originClassifications.get(0),
new Date());
outputGuards.add(OriginSeekerGovernanceActionProvider.ORIGIN_ASSIGNED_GUARD);
completionStatus = CompletionStatus.ACTIONED;
}
else /* multiple origins to choose from */
{
/*
* There are multiple possible origin classifications to use. This is going to need a manual assignment and so
* the different origin values are added to the request parameters that will be added to the request parameters to
* make it easier for the steward to understand the origins found in the lineage.
*/
newRequestParameters = new HashMap<>();
ObjectMapper objectMapper = new ObjectMapper();
String jsonString = objectMapper.writeValueAsString(originClassifications);
newRequestParameters.put(detectedOriginsProperty, jsonString);
outputGuards.add(OriginSeekerGovernanceActionProvider.MULTIPLE_ORIGINS_DETECTED_GUARD);
completionStatus = CompletionStatus.INVALID;
}
}
}
else
{
/*
* Multiple action targets to supply. This governance action does not support multiple action targets because the
* result of the origin search could be different for each action target and so it would be difficult to automate the response.
*/
completionStatus = CompletionStatus.FAILED;
outputGuards.add(OriginSeekerGovernanceActionProvider.MULTIPLE_TARGETS_DETECTED_GUARD);
}
governanceContext.recordCompletionStatus(completionStatus, outputGuards, newRequestParameters, null);
}
catch (OCFCheckedExceptionBase error)
{
throw new ConnectorCheckedException(error.getReportedErrorMessage(), error);
}
catch (Exception error)
{
throw new ConnectorCheckedException(GovernanceActionConnectorsErrorCode.UNEXPECTED_EXCEPTION.getMessageDefinition(governanceServiceName,
error.getClass().getName(),
error.getMessage()),
error.getClass().getName(),
methodName,
error);
}
}
/**
* Return the AssetOrigin classification from an asset entity (if set).
*
* @param asset asset element to check
* @return null or located AssetOrigin classification
*/
private ElementClassification getAssetOriginClassification(OpenMetadataElement asset)
{
List<ElementClassification> existingClassifications = asset.getClassifications();
if (existingClassifications != null)
{
for (ElementClassification existingClassification : existingClassifications)
{
if (existingClassification != null)
{
if (assetOriginClassification.equals(existingClassification.getClassificationName()))
{
return existingClassification;
}
}
}
}
return null;
}
    /**
     * Return the list of origin classifications detected by walking upstream through the lineage
     * relationships of the supplied asset metadata element.  The returned list is deduplicated.
     *
     * @param asset metadata element to start the lineage walk from
     * @param coveredEntityGUIDs unique identifiers of the elements already visited - guards against cycles in the lineage graph
     * @return null or a list of origin classification properties
     * @throws Exception problem accessing the metadata store or navigating the lineage graph
     */
private List<ElementProperties> getOrigins(OpenMetadataElement asset,
List<String> coveredEntityGUIDs) throws Exception
{
final String lineageMappingRelationshipName = "LineageMapping";
List<ElementProperties> results = new ArrayList<>();
/*
* The lineage is explored by repeatedly retrieving the lineage from the metadata store.
*/
OpenMetadataStore store = governanceContext.getOpenMetadataStore();
/*
* Retrieving from end 2 means it is working upstream on the lineage relationships.
* Note this is only working with lineage relationships between Assets. It would need
* extending to work with lineage between ports and schema elements.
*/
List<RelatedMetadataElement> lineageLinks = store.getRelatedMetadataElements(asset.getElementGUID(),
2,
lineageMappingRelationshipName,
true,
false,
null,
0,
0);
if ((lineageLinks != null) && (! lineageLinks.isEmpty()))
{
/*
* Explore each branch in the lineage map.
*/
for (RelatedMetadataElement lineageLink : lineageLinks)
{
if (lineageLink != null)
{
OpenMetadataElement nextAsset = lineageLink.getElementProperties();
/*
                     * Some lineage graphs are circular, so the list of covered entity GUIDs prevents the same element from being processed twice.
*/
if (! coveredEntityGUIDs.contains(nextAsset.getElementGUID()))
{
coveredEntityGUIDs.add(nextAsset.getElementGUID());
/*
* If we find an origin classification on this asset we stop traversing the lineage graph.
*/
ElementClassification existingAssetOriginClassification = this.getAssetOriginClassification(nextAsset);
if (existingAssetOriginClassification == null)
{
/*
* No origin classification so it must look further back in the lineage graph.
*/
List<ElementProperties> upstreamResults = getOrigins(nextAsset, coveredEntityGUIDs);
if ((upstreamResults != null) && (!upstreamResults.isEmpty()))
{
/*
* Now it is necessary to merge and deduplicate the results.
*/
for (ElementProperties upstreamResult : upstreamResults)
{
if (upstreamResult != null)
{
if (!results.contains(upstreamResult))
{
results.add(upstreamResult);
}
}
}
}
}
else
{
/*
* There is an origin classification so process it.
*/
if (existingAssetOriginClassification.getClassificationProperties() != null)
{
if (! results.contains(existingAssetOriginClassification.getClassificationProperties()))
{
results.add(existingAssetOriginClassification.getClassificationProperties());
}
}
}
}
}
}
}
if (results.isEmpty())
{
return null;
}
return results;
}
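
    /*
     * Illustrative sketch only - not part of the original connector.  A follow-on step that
     * receives the request parameters could recover the detected origins from the JSON string
     * written in start().  It assumes ElementProperties round-trips through Jackson, which
     * mirrors the writeValueAsString() call above; the method name is hypothetical and
     * com.fasterxml.jackson.core.type.TypeReference ships with the same Jackson dependency.
     */
    private List<ElementProperties> parseDetectedOrigins(String jsonString) throws Exception
    {
        ObjectMapper objectMapper = new ObjectMapper();

        /* The anonymous TypeReference preserves the generic list element type during deserialization. */
        return objectMapper.readValue(jsonString,
                                      new com.fasterxml.jackson.core.type.TypeReference<List<ElementProperties>>() {});
    }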
}
| 7,217 |
6,717 | // $Id: ClDouble.java,v 1.8 1999/04/20 00:26:27 gjb Exp $
//
// Cassowary Incremental Constraint Solver
// Original Smalltalk Implementation by <NAME>
// This Java Implementation by <NAME>, <<EMAIL>>
// http://www.cs.washington.edu/homes/gjb
// (C) 1998, 1999 <NAME> and <NAME>
// See ../LICENSE for legal details regarding this software
//
// ClDouble
//
package EDU.Washington.grad.gjb.cassowary;
public class ClDouble extends Number
{
public ClDouble(double val)
{ value = val; }
public ClDouble()
{ this(0.0); }
public final Object clone()
{ return new ClDouble(value); }
public final double doubleValue()
{ return value; }
public final int intValue()
{ return (int) value; }
public final long longValue()
{ return (long) value; }
public final float floatValue()
{ return (float) value; }
public final byte byteValue()
{ return (byte) value; }
public final short shortValue()
{ return (short) value; }
public final void setValue(double val)
{ value = val; }
public final String toString()
{ return java.lang.Double.toString(value); }
public final boolean equals(Object o)
{
try {
return value == ((ClDouble) o).value;
} catch (Exception err) {
return false;
}
}
public final int hashCode()
{
System.err.println("ClDouble.hashCode() called!");
return (int) java.lang.Double.doubleToLongBits(value);
}
private double value;
}
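
/*
 * Usage sketch (not part of the original file): ClDouble is a *mutable* boxed double -
 * unlike java.lang.Double, which is immutable - so the solver can hand out one object
 * and update its value in place wherever that object is shared:
 *
 *   ClDouble coefficient = new ClDouble(1.5);
 *   coefficient.setValue(coefficient.doubleValue() + 1.0);   // same object, now 2.5
 */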
| 513 |
13,885 | <gh_stars>1000+
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <deque>
#include <functional>
#include <mutex>
/**
* A simple thread-safe job queue.
*
* JobQueue allows to push "jobs" (i.e.: code and its state) into a queue and later execute these
* job in FIFO order, possibly (and generally) from another thread.
*
* example:
* @code
* using utils::JobQueue;
* static JobQueue sJobs;
*
* struct Foo {
* void bar();
* void baz(int);
* } foo;
*
* sJobs.push([]() {
* // one can use a lambda
* });
*
 * sJobs.push(&Foo::bar, foo);        // ...or a method of a class
 *
 * sJobs.push(&Foo::baz, foo, 42);    // ...even a method with arguments
*
* // Later, in another thread for instance...
*
* // empty the queue and runs all jobs in FIFO order
* sJobs.runAllJobs();
*
* // runs the oldest job if there is one
* bool got_one = sJobs.runJobIfAny();
*
* // dequeue a job, but run it manually.
* Job job(sJobs.pop());
* if (job) {
* job();
* }
*
* @endcode
*
* @warning When both sides of the JobQueue are in different threads (as it is usually the case),
 *          make sure to capture all stack parameters of the job by value (i.e.: do not capture
 *          by reference any parameters that live on the stack).
*/
class JobQueue {
public:
using Job = std::function<void()>;
JobQueue() = default;
/**
* Push a job to the back of the queue.
* @param func anything that can be called
* (e.g.: lambda, function or method with optional parameters)
* @param args optional parameters to method or function.
*/
template<typename CALLABLE, typename ... ARGS>
void push(CALLABLE&& func, ARGS&&... args) {
enqueue(Job(std::bind(std::forward<CALLABLE>(func), std::forward<ARGS>(args)...)));
}
/**
* Checks whether the JobQueue is empty.
* @return true if there is no jobs in the queue.
*/
bool isEmpty() const;
/**
* Dequeues the oldest job and executes (runs) it.
* @return true if a job was run.
*/
bool runJobIfAny();
/**
* Empties the queue and runs all jobs atomically w.r.t. the queue. Jobs are run in FIFO order.
*/
void runAllJobs();
/**
* Dequeues the oldest job.
* @return a handle to the oldest job or null if the queue was empty.
* @note the job can be manually run by calling job().
*/
Job pop();
private:
JobQueue(const JobQueue& queue) = delete;
JobQueue& operator=(const JobQueue& queue) = delete;
void enqueue(Job&& job);
std::deque<Job> m_queue;
mutable std::mutex m_lock;
};
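
/*
 * Illustrative sketch only (assumption: the project's real definitions live in a separate
 * JobQueue.cpp that is not shown in this dump).  One straightforward implementation takes
 * m_lock just long enough to touch the deque; runAllJobs() swaps the whole queue out under
 * the lock so the jobs themselves run without holding it.  Wrapped in #if 0 so this header
 * still compiles cleanly if the real definitions exist elsewhere; std::move may additionally
 * require <utility>.
 */
#if 0
void JobQueue::enqueue(JobQueue::Job&& job) {
    std::lock_guard<std::mutex> lock(m_lock);
    m_queue.push_back(std::move(job));
}

JobQueue::Job JobQueue::pop() {
    std::lock_guard<std::mutex> lock(m_lock);
    if (m_queue.empty()) {
        return Job();                     // null job; converts to false
    }
    Job job(std::move(m_queue.front()));
    m_queue.pop_front();
    return job;
}

bool JobQueue::isEmpty() const {
    std::lock_guard<std::mutex> lock(m_lock);
    return m_queue.empty();
}

bool JobQueue::runJobIfAny() {
    Job job(pop());
    if (job) {
        job();
        return true;
    }
    return false;
}

void JobQueue::runAllJobs() {
    std::deque<Job> jobs;
    {
        std::lock_guard<std::mutex> lock(m_lock);
        m_queue.swap(jobs);               // empty the queue atomically
    }
    for (auto& job : jobs) {
        job();                            // run in FIFO order
    }
}
#endif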
| 1,100 |
427 | <reponame>sabaalmas/Java-Coding-Problems
package modern.challenge;
public class Cart {
}
| 37 |
2,053 | <gh_stars>1000+
/*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.client;
import org.eclipse.ui.IFolderLayout;
import org.eclipse.ui.IPageLayout;
import org.eclipse.ui.IPerspectiveFactory;
import scouter.client.group.view.GroupNavigationView;
import scouter.client.stack.views.StackAnalyzerView;
import scouter.client.views.ObjectDailyListView;
import scouter.client.views.ObjectNavigationView;
import scouter.client.views.WorkspaceExplorer;
public class PerspectiveStackAnalyzer implements IPerspectiveFactory {
public static final String ID = PerspectiveStackAnalyzer.class.getName();
public void createInitialLayout(IPageLayout layout) {
String editorArea = layout.getEditorArea();
layout.setEditorAreaVisible(false);
IFolderLayout agentLayout = layout.createFolder(IConstants.LAYOUT_WASSERVICE_OBJECT_NAVIGATION, IPageLayout.LEFT, 0.20f, editorArea);
agentLayout.addPlaceholder(ObjectNavigationView.ID + ":*");
agentLayout.addPlaceholder(ObjectDailyListView.ID + ":*");
agentLayout.addPlaceholder(GroupNavigationView.ID);
agentLayout.addView(ObjectNavigationView.ID);
layout.getViewLayout(ObjectNavigationView.ID).setCloseable(false);
IFolderLayout mainLayout = layout.createFolder("perspective.stack.main", IPageLayout.LEFT, 1.0f, editorArea);
mainLayout.addView(StackAnalyzerView.ID);
layout.getViewLayout(StackAnalyzerView.ID).setCloseable(false);
IFolderLayout explorerFolder = layout.createFolder("perspective.stack.explorer", IPageLayout.BOTTOM, 0.5f, IConstants.LAYOUT_WASSERVICE_OBJECT_NAVIGATION);
explorerFolder.addView(WorkspaceExplorer.ID);
layout.addPerspectiveShortcut(getId());
}
public static String getId() {
return ID;
}
}
| 749 |
842 | #include "say.h"
#include <iostream>
void Say(const std::string& msg) {
std::cout << msg << "!\n";
}
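
// Usage sketch (assumption: "say.h" is not included in this dump, but given the definition
// above it presumably declares `void Say(const std::string& msg);` and includes <string>):
//
//   Say("hello");   // prints "hello!"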
| 47 |
1,144 | <gh_stars>1000+
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.3.0
// See <a href="https://javaee.github.io/jaxb-v2/">https://javaee.github.io/jaxb-v2/</a>
//
/**
* We modified the generated file to have the namespace prefices "invoice", "xenc" and "ds".
* Note that we run jaxb with <code>-npa</code> to it won't try creating a new package-info.java.
* Thx to https://dzone.com/articles/jaxb-and-namespace-prefixes
*/
@javax.xml.bind.annotation.XmlSchema( //
namespace = "http://www.forum-datenaustausch.ch/invoice", //
xmlns = {
@XmlNs(prefix = "invoice", namespaceURI = "http://www.forum-datenaustausch.ch/invoice"),
@XmlNs(prefix = "xenc", namespaceURI = "http://www.w3.org/2001/04/xmlenc#"),
@XmlNs(prefix = "ds", namespaceURI = "http://www.w3.org/2000/09/xmldsig#")
}, //
elementFormDefault = javax.xml.bind.annotation.XmlNsForm.QUALIFIED)
package de.metas.vertical.healthcare_ch.forum_datenaustausch_ch.invoice_440.request;
import javax.xml.bind.annotation.XmlNs;
| 417 |