hexsha
stringlengths 40
40
| size
int64 3
1.05M
| ext
stringclasses 163
values | lang
stringclasses 53
values | max_stars_repo_path
stringlengths 3
945
| max_stars_repo_name
stringlengths 4
112
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
sequencelengths 1
10
| max_stars_count
float64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
945
| max_issues_repo_name
stringlengths 4
113
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
sequencelengths 1
10
| max_issues_count
float64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
945
| max_forks_repo_name
stringlengths 4
113
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
sequencelengths 1
10
| max_forks_count
float64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 3
1.05M
| avg_line_length
float64 1
966k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ed8ac0efc6e933f1e061a1a73ee9ffc38bab9ef2 | 2,724 | pm | Perl | t/filter/TestFilter/both_str_req_proxy.pm | randolf/mod_perl | 561f0672e8da450158b34fdac45a949845f0dc0d | [
"Apache-2.0"
] | 23 | 2015-03-17T11:43:34.000Z | 2022-02-19T13:39:01.000Z | t/filter/TestFilter/both_str_req_proxy.pm | randolf/mod_perl | 561f0672e8da450158b34fdac45a949845f0dc0d | [
"Apache-2.0"
] | 3 | 2019-11-21T03:45:00.000Z | 2021-12-27T23:44:28.000Z | t/filter/TestFilter/both_str_req_proxy.pm | randolf/mod_perl | 561f0672e8da450158b34fdac45a949845f0dc0d | [
"Apache-2.0"
] | 19 | 2015-02-18T21:23:25.000Z | 2022-02-13T16:27:14.000Z | # please insert nothing before this line: -*- mode: cperl; cperl-indent-level: 4; cperl-continued-statement-offset: 4; indent-tabs-mode: nil -*-
package TestFilter::both_str_req_proxy;
# very similar to TestFilter::both_str_req_add, but the request is
# proxified. we filter the POSTed body before it goes via the proxy and
# we filter the response after it returned from the proxy
use strict;
use warnings FATAL => 'all';
use Apache2::RequestRec ();
use Apache2::RequestIO ();
use Apache2::Filter ();
use Apache::TestTrace;
use TestCommon::Utils ();
use Apache2::Const -compile => qw(OK M_POST);
sub in_filter {
my $filter = shift;
debug "input filter";
while ($filter->read(my $buffer, 1024)) {
$filter->print(lc $buffer);
}
Apache2::Const::OK;
}
sub out_filter {
my $filter = shift;
debug "output filter";
while ($filter->read(my $buffer, 1024)) {
$buffer =~ s/\s+//g;
$filter->print($buffer);
}
Apache2::Const::OK;
}
sub handler {
my $r = shift;
debug "response handler";
$r->content_type('text/plain');
if ($r->method_number == Apache2::Const::M_POST) {
$r->print(TestCommon::Utils::read_post($r));
}
return Apache2::Const::OK;
}
1;
__DATA__
<NoAutoConfig>
<IfModule mod_proxy.c>
<Proxy http://@servername@:@port@/*>
<IfModule mod_version.c>
<IfVersion < 2.3.0>
<IfModule @ACCESS_MODULE@>
Order Deny,Allow
Deny from all
Allow from @servername@
</IfModule>
</IfVersion>
<IfVersion > 2.4.1>
<IfModule mod_access_compat.c>
Order Deny,Allow
Deny from all
Allow from @servername@
</IfModule>
</IfVersion>
</IfModule>
</Proxy>
ProxyRequests Off
ProxyPass /TestFilter__both_str_req_proxy/ \
http://@servername@:@port@/TestFilter__both_str_req_proxy_content/
ProxyPassReverse /TestFilter__both_str_req_proxy/ \
http://@servername@:@port@/TestFilter__both_str_req_proxy_content/
</IfModule>
PerlModule TestFilter::both_str_req_proxy
<Location /TestFilter__both_str_req_proxy>
PerlInputFilterHandler TestFilter::both_str_req_proxy::in_filter
PerlOutputFilterHandler TestFilter::both_str_req_proxy::out_filter
</Location>
<Location /TestFilter__both_str_req_proxy_content>
SetHandler modperl
PerlResponseHandler TestFilter::both_str_req_proxy
</Location>
</NoAutoConfig>
| 25.942857 | 144 | 0.604626 |
ed41cd2044ff5193cd13710fc3b00461688d2de1 | 1,155 | al | Perl | benchmark/benchmarks/FASP-benchmarks/data/random/random-0245-110-275.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
] | null | null | null | benchmark/benchmarks/FASP-benchmarks/data/random/random-0245-110-275.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
] | null | null | null | benchmark/benchmarks/FASP-benchmarks/data/random/random-0245-110-275.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
] | null | null | null | 1 5
2 83 89 97
3 35
4 16 89
5 7 9 15 76 108
6 21
7 3 91 102
8 26 65
9 8 14 33 46
10 80
11 33 51
12 30 31 56 70 73 92 96 101
13 84
14 11 55 80 88 107
15 24 88
16 25 69 78
17 62 66 107
18 59 74 103
19 37 56 95
20 1 19 34 68
21 24
22 51 67 90
23 31 42
24 43 50 87 104
25 29 96 100 102
26 70
27 67
28 36 58 93
29 78
30 4 22 69
31 41 88 99
32 16 72
33
34 30 38 79
35
36
37 8 83
38 19 49 58 83
39 8
40 50 73 103
41 13 38
42 21 36 50 62 66 109
43 42 51
44 53
45 19
46 49 86
47 12 14 35
48 39 60 65 75
49
50 20 32 105
51 3 75 80
52
53 16
54 30
55 7 21 80 89
56 17 59
57 28 33 94
58 12 65 104
59
60 18 64 78 80 91
61 3 10 19 27 58
62 36 39 82
63 32 84
64 3 12 71
65 87
66 12 15 92
67 16
68 9 59
69
70 26
71 10 74 78 94 102
72 3 36 45 93 103
73 31 67
74 29 67 101
75 37 73 90
76 9 19 52 58
77 42 71
78 57
79 17 24
80 29 33 52
81 22 97
82 5 57 104
83 68 82 100
84 58 68 76
85 66 96 105
86 2 56 64 82 106 107
87 5 15 46 48
88 39 107
89
90 44 48 91
91 37 41 93
92 16 25 39
93 64
94 53
95 8 12 28
96 19 55 86
97 52 55
98 7 8 34
99 3 49 73
100 1 16 103
101 34 93 94 100
102 26 32
103 33 52 81
104 22 79
105 9 33 52 83
106 8 10 56 92 101 103 110
107 9
108 1 68
109
110 64 | 10.5 | 27 | 0.667532 |
ed44e02d7c44d2ee3a3083ab59b39cb34b0dcc6f | 1,208 | pm | Perl | beer-song/Example.pm | yanick/xperl5 | f56415e9bab363142e7a4fee809574d4a66b0e42 | [
"MIT"
] | 1 | 2015-10-31T22:40:29.000Z | 2015-10-31T22:40:29.000Z | beer-song/Example.pm | dnmfarrell/xperl5 | ab7bf8a35532f6bc1e6630ee596eb32b42232844 | [
"MIT"
] | null | null | null | beer-song/Example.pm | dnmfarrell/xperl5 | ab7bf8a35532f6bc1e6630ee596eb32b42232844 | [
"MIT"
] | null | null | null | package Example;
sub verse {
my $num = shift;
my $song = _line1($num) . _line2($num);
return $song;
}
sub sing {
my ($start, $end) = @_;
$end = 0 unless $end;
my $song;
while ($start >= $end) {
$song .= verse($start);
$song .= "\n";
$start--;
}
return $song;
}
sub _line1 {
my $num = shift;
if ($num > 1) {
return "$num bottles of beer on the wall, $num bottles of beer.\n";
}
elsif ($num == 1) {
return "1 bottle of beer on the wall, 1 bottle of beer.\n";
}
elsif ($num == 0) {
return "No more bottles of beer on the wall, no more bottles of beer.\n";
}
}
sub _line2 {
my $num = shift;
if ($num > 2) {
$num--;
return "Take one down and pass it around, $num bottles of beer on the wall.\n";
}
elsif ($num == 2) {
return "Take one down and pass it around, 1 bottle of beer on the wall.\n";
}
elsif ($num == 1) {
return "Take it down and pass it around, no more bottles of beer on the wall.\n";
}
elsif ($num == 0) {
return "Go to the store and buy some more, 99 bottles of beer on the wall.\n";
}
}
__PACKAGE__;
| 21.963636 | 89 | 0.524007 |
73d3c25b4d038fc5cb6d2b04887631d9ac4db697 | 11,348 | pm | Perl | lib/MusicBrainz/Server/Controller/Edit.pm | kellnerd/musicbrainz-server | 9e058e10219ea6b8942cfd64160ffe19769f747b | [
"BSD-2-Clause"
] | null | null | null | lib/MusicBrainz/Server/Controller/Edit.pm | kellnerd/musicbrainz-server | 9e058e10219ea6b8942cfd64160ffe19769f747b | [
"BSD-2-Clause"
] | null | null | null | lib/MusicBrainz/Server/Controller/Edit.pm | kellnerd/musicbrainz-server | 9e058e10219ea6b8942cfd64160ffe19769f747b | [
"BSD-2-Clause"
] | null | null | null | package MusicBrainz::Server::Controller::Edit;
use Moose;
use Moose::Util qw( does_role );
use Try::Tiny;
BEGIN { extends 'MusicBrainz::Server::Controller' }
use Data::Page;
use DBDefs;
use MusicBrainz::Server::EditRegistry;
use MusicBrainz::Server::Edit::Utils qw( status_names );
use MusicBrainz::Server::Constants qw( :quality );
use MusicBrainz::Server::Validation qw( is_database_row_id );
use MusicBrainz::Server::EditSearch::Query;
use MusicBrainz::Server::Data::Utils qw( type_to_model load_everything_for_edits );
use MusicBrainz::Server::Translation qw( N_l );
use List::AllUtils qw( sort_by );
use aliased 'MusicBrainz::Server::EditRegistry';
with 'MusicBrainz::Server::Controller::Role::Load' => {
model => 'Edit',
entity_name => 'edit',
};
__PACKAGE__->config(
paging_limit => 50,
);
=head1 NAME
MusicBrainz::Server::Controller::Moderation - handle user interaction
with moderations
=head1 DESCRIPTION
This controller allows editors to view moderations, and vote on open
moderations.
=head1 ACTIONS
=head2 moderation
Root of chained actions that work with a single moderation. Cannot be
called on its own.
=cut
sub base : Chained('/') PathPart('edit') CaptureArgs(0) { }
sub _load
{
my ($self, $c, $edit_id) = @_;
return unless is_database_row_id($edit_id);
return $c->model('Edit')->get_by_id($edit_id);
}
sub show : Chained('load') PathPart('')
{
my ($self, $c) = @_;
my $edit = $c->stash->{edit};
load_everything_for_edits($c, [ $edit ]);
$c->form(add_edit_note => 'EditNote');
$c->stash->{template} = 'edit/index.tt';
}
sub data : Chained('load') RequireAuth
{
my ($self, $c) = @_;
my $edit = $c->stash->{edit};
my $accept = $c->req->header('Accept');
if ($accept eq 'application/json') {
$c->res->content_type('application/json; charset=utf-8');
$c->res->body($c->json_utf8->encode({
data => $c->json->decode($edit->raw_data),
status => $edit->status,
type => $edit->edit_type,
}));
return;
}
my $related = $c->model('Edit')->get_related_entities($edit);
my %entities;
while (my ($type, $ids) = each %$related) {
$entities{$type} = $c->model(type_to_model($type))->get_by_ids(@$ids) if @$ids;
}
$c->stash( related_entities => \%entities,
template => 'edit/data.tt' );
}
sub enter_votes : Local RequireAuth DenyWhenReadonly
{
my ($self, $c) = @_;
my $form = $c->form(vote_form => 'Vote');
if ($c->form_posted_and_valid($form)) {
my @submissions = @{ $form->field('vote')->value };
my @votes = grep { defined($_->{vote}) } @submissions;
unless ($c->user->is_editing_enabled || scalar @votes == 0) {
$c->stash(
current_view => 'Node',
component_path => 'edit/CannotVote',
);
return;
}
$c->model('Edit')->insert_votes_and_notes(
$c->user,
votes => [ @votes ],
notes => [ grep { defined($_->{edit_note}) } @submissions ]
);
}
my $redir = $c->req->params->{url} || $c->uri_for_action('/edit/open');
$c->response->redirect($redir);
$c->detach;
}
sub approve : Chained('load') RequireAuth(auto_editor) RequireAuth(editing_enabled) DenyWhenReadonly
{
my ($self, $c) = @_;
$c->model('MB')->with_transaction(sub {
my $edit = $c->model('Edit')->get_by_id_and_lock($c->stash->{edit}->id);
$c->model('Vote')->load_for_edits($edit);
if (!$edit->editor_may_approve($c->user)) {
$c->stash(
current_view => 'Node',
component_path => 'edit/CannotApproveEdit',
component_props => {edit => $edit->TO_JSON},
);
return;
}
else {
if ($edit->approval_requires_comment($c->user)) {
$c->model('EditNote')->load_for_edits($edit);
my $left_note;
for my $note (@{ $edit->edit_notes }) {
next if $note->editor_id != $c->user->id;
$left_note = 1;
last;
}
unless ($left_note) {
$c->stash(
current_view => 'Node',
component_path => 'edit/NoteIsRequired',
component_props => {edit => $edit->TO_JSON},
);
return;
};
}
$c->model('Edit')->approve($edit, $c->user);
$c->redirect_back(
fallback => $c->uri_for_action('/edit/show', [ $edit->id ]),
);
}
});
}
sub cancel : Chained('load') RequireAuth DenyWhenReadonly
{
my ($self, $c) = @_;
my $edit = $c->stash->{edit};
if (!$edit->editor_may_cancel($c->user)) {
$c->stash(
current_view => 'Node',
component_path => 'edit/CannotCancelEdit',
component_props => {edit => $edit->TO_JSON},
);
$c->detach;
}
$c->model('Edit')->load_all($edit);
my $form = $c->form(form => 'Confirm');
if ($c->form_posted_and_valid($form)) {
$c->model('MB')->with_transaction(sub {
$c->model('Edit')->cancel($edit);
if (my $edit_note = $form->field('edit_note')->value) {
$c->model('EditNote')->add_note(
$edit->id,
{
editor_id => $c->user->id,
text => $edit_note
}
);
}
});
$c->response->redirect($c->stash->{cancel_redirect} || $c->req->query_params->{returnto} || $c->uri_for_action('/edit/show', [ $edit->id ]));
$c->detach;
}
}
=head2 open
Show a list of open moderations
=cut
sub open : Local
{
my ($self, $c) = @_;
my $edits = $self->_load_paged($c, sub {
if ($c->user_exists) {
$c->model('Edit')->find_open_for_editor($c->user->id, shift, shift);
} else {
$c->model('Edit')->find_all_open(shift, shift);
}
});
$c->stash( edits => $edits ); # stash early in case an ISE occurs
load_everything_for_edits($c, $edits);
$c->form(add_edit_note => 'EditNote');
}
sub search : Path('/search/edits')
{
my ($self, $c) = @_;
my $coll = $c->get_collator();
my %grouped = MusicBrainz::Server::EditRegistry->grouped_by_name;
$c->stash(
edit_types => [
map [
join(q(,), sort { $a <=> $b } map { $_->edit_type } @{ $grouped{$_} }) => $_
], sort_by { $coll->getSortKey($_) } keys %grouped
],
status => status_names(),
quality => [ [$QUALITY_LOW => N_l('Low')], [$QUALITY_NORMAL => N_l('Normal')], [$QUALITY_HIGH => N_l('High')], [$QUALITY_UNKNOWN => N_l('Default')] ],
languages => [ grep { $_->frequency > 0 } $c->model('Language')->get_all ],
countries => [ $c->model('CountryArea')->get_all ],
relationship_type => [ $c->model('LinkType')->get_full_tree(get_deprecated_and_empty => 1) ]
);
return unless %{ $c->req->query_params };
my $query = MusicBrainz::Server::EditSearch::Query->new_from_user_input($c->req->query_params, $c->user);
$c->stash( query => $query );
if ($query->valid && !$c->req->query_params->{'form_only'}) {
my $edits;
my $timed_out = 0;
try {
$edits = $self->_load_paged($c, sub {
return $c->model('Edit')->run_query($query, shift, shift);
});
} catch {
unless (blessed $_ && does_role($_, 'MusicBrainz::Server::Exceptions::Role::Timeout')) {
die $_; # rethrow
}
$timed_out = 1;
};
if ($timed_out) {
$c->stash( timed_out => 1 );
return;
}
$c->stash(
edits => $edits, # stash early in case an ISE occurs
template => 'edit/search_results.tt',
);
load_everything_for_edits($c, $edits);
$c->form(add_edit_note => 'EditNote');
}
}
sub subscribed : Local RequireAuth {
my ($self, $c) = @_;
my $only_open = 0;
if (($c->req->query_params->{open} // '') eq '1') {
$only_open = 1;
}
my $edits = $self->_load_paged($c, sub {
$c->model('Edit')->subscribed_entity_edits($c->user->id, $only_open, shift, shift);
});
$c->stash(
edits => $edits, # stash early in case an ISE occurs
template => 'edit/subscribed.tt',
);
load_everything_for_edits($c, $edits);
}
sub subscribed_editors : Local RequireAuth {
my ($self, $c) = @_;
my $only_open = 0;
if (($c->req->query_params->{open} // '') eq '1') {
$only_open = 1;
}
my $edits = $self->_load_paged($c, sub {
$c->model('Edit')->subscribed_editor_edits($c->user->id, $only_open, shift, shift);
});
$c->stash(
edits => $edits, # stash early in case an ISE occurs
template => 'edit/subscribed-editors.tt',
);
load_everything_for_edits($c, $edits);
}
sub notes_received : Path('/edit/notes-received') RequireAuth {
my ($self, $c) = @_;
# Log when the editor loaded the page, so that we know when to notify them
# again about new edits (see Data::EditNote::add_note).
my $store = $c->model('MB')->context->store;
my $notes_viewed_key = 'edit_notes_received_last_viewed:' . $c->user->name;
$store->set($notes_viewed_key, time);
# Expire the notification in 30 days.
$store->expire($notes_viewed_key, 60 * 60 * 24 * 30);
my $edit_notes = $self->_load_paged($c, sub {
$c->model('EditNote')->find_by_recipient($c->user->id, shift, shift);
});
$c->model('Editor')->load(@$edit_notes);
$c->model('Edit')->load_for_edit_notes(@$edit_notes);
$c->model('Vote')->load_for_edits(map { $_->edit } @$edit_notes);
$c->stash(
edit_notes => $edit_notes,
template => 'edit/notes-received.tt',
);
}
=head2 conditions
Display a table of all edit types, and their relative conditions
for acceptance
=cut
sub edit_types : Path('/doc/Edit_Types')
{
my ($self, $c) = @_;
my %by_category;
for my $class (EditRegistry->get_all_classes) {
$by_category{$class->edit_category} ||= [];
push @{ $by_category{$class->edit_category} }, $class;
}
for my $category (keys %by_category) {
$by_category{$category} = [
sort { $a->l_edit_name cmp $b->l_edit_name }
@{ $by_category{$category} }
];
}
$c->stash(
by_category => \%by_category,
template => 'doc/edit_types.tt'
);
}
sub edit_type : Path('/doc/Edit_Types') Args(1) {
my ($self, $c, $edit_type) = @_;
my $class;
$class = EditRegistry->class_from_type($edit_type)
if is_database_row_id($edit_type);
$class or $c->detach('/error_404');
my $id = ('Edit Type/' . $class->edit_name) =~ tr/ /_/r;
my $version = $c->model('WikiDocIndex')->get_page_version($id);
my $page = $c->model('WikiDoc')->get_page($id, $version);
$c->stash(
edit_type => $class,
template => 'doc/edit_type.tt',
page => $page
);
}
1;
| 28.729114 | 158 | 0.542034 |
ed95a2236001f3aaef19c4556fdbf4b2815b3ada | 3,061 | pm | Perl | auto-lib/Paws/RDS/CreateDBParameterGroup.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | [
"Apache-2.0"
] | null | null | null | auto-lib/Paws/RDS/CreateDBParameterGroup.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | [
"Apache-2.0"
] | null | null | null | auto-lib/Paws/RDS/CreateDBParameterGroup.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | [
"Apache-2.0"
] | null | null | null |
package Paws::RDS::CreateDBParameterGroup;
use Moose;
has DBParameterGroupFamily => (is => 'ro', isa => 'Str', required => 1);
has DBParameterGroupName => (is => 'ro', isa => 'Str', required => 1);
has Description => (is => 'ro', isa => 'Str', required => 1);
has Tags => (is => 'ro', isa => 'ArrayRef[Paws::RDS::Tag]');
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'CreateDBParameterGroup');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::RDS::CreateDBParameterGroupResult');
class_has _result_key => (isa => 'Str', is => 'ro', default => 'CreateDBParameterGroupResult');
1;
### main pod documentation begin ###
=head1 NAME
Paws::RDS::CreateDBParameterGroup - Arguments for method CreateDBParameterGroup on L<Paws::RDS>
=head1 DESCRIPTION
This class represents the parameters used for calling the method CreateDBParameterGroup on the
L<Amazon Relational Database Service|Paws::RDS> service. Use the attributes of this class
as arguments to method CreateDBParameterGroup.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to CreateDBParameterGroup.
=head1 SYNOPSIS
my $rds = Paws->service('RDS');
# To create a DB parameter group.
# This example creates a DB parameter group.
my $CreateDBParameterGroupResult = $rds->CreateDBParameterGroup(
{
'DBParameterGroupFamily' => 'mysql5.6',
'DBParameterGroupName' => 'mymysqlparametergroup',
'Description' => 'My MySQL parameter group'
}
);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/rds/CreateDBParameterGroup>
=head1 ATTRIBUTES
=head2 B<REQUIRED> DBParameterGroupFamily => Str
The DB parameter group family name. A DB parameter group can be
associated with one and only one DB parameter group family, and can be
applied only to a DB instance running a database engine and engine
version compatible with that DB parameter group family.
=head2 B<REQUIRED> DBParameterGroupName => Str
The name of the DB parameter group.
Constraints:
=over
=item *
Must be 1 to 255 letters, numbers, or hyphens.
=item *
First character must be a letter
=item *
Cannot end with a hyphen or contain two consecutive hyphens
=back
This value is stored as a lowercase string.
=head2 B<REQUIRED> Description => Str
The description for the DB parameter group.
=head2 Tags => ArrayRef[L<Paws::RDS::Tag>]
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method CreateDBParameterGroup in L<Paws::RDS>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 27.827273 | 249 | 0.72068 |
ed56f547d6f16a9b8aeff41061f15881eaeb3037 | 4,546 | pm | Perl | modules/EnsEMBL/Draw/GlyphSet/flat_file.pm | nakib103/ensembl-webcode | 4814ccb25ff9925d80b71514c72793917614c614 | [
"Apache-2.0"
] | 16 | 2015-01-14T14:12:30.000Z | 2021-01-27T15:28:52.000Z | modules/EnsEMBL/Draw/GlyphSet/flat_file.pm | nakib103/ensembl-webcode | 4814ccb25ff9925d80b71514c72793917614c614 | [
"Apache-2.0"
] | 250 | 2015-01-05T13:03:19.000Z | 2022-03-30T09:07:12.000Z | modules/EnsEMBL/Draw/GlyphSet/flat_file.pm | nakib103/ensembl-webcode | 4814ccb25ff9925d80b71514c72793917614c614 | [
"Apache-2.0"
] | 98 | 2015-01-05T14:58:48.000Z | 2022-02-15T17:11:32.000Z | =head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2021] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Draw::GlyphSet::flat_file;
### Module for drawing features parsed from a non-indexed text file (such as
### user-uploaded data)
use strict;
use EnsEMBL::Web::File::User;
use EnsEMBL::Web::IOWrapper;
use Scalar::Util qw(looks_like_number);
use parent qw(EnsEMBL::Draw::GlyphSet::UserData);
sub get_data {
### Method to parse a data file and return information to be displayed
### @return Arrayref - see parent
my $self = shift;
my $data = [];
my ($skip, $strand_to_omit) = $self->get_strand_filters;
return $data if $skip == $self->strand;
my $container = $self->{'container'};
my $hub = $self->{'config'}->hub;
my $species_defs = $self->species_defs;
my $type = $self->my_config('type') || $self->my_config('sub_type');
my $format = $self->my_config('format');
my $legend = {};
## Get the file contents
my %args = (
'hub' => $hub,
'format' => $format,
);
if ($type && $type eq 'url') {
$args{'file'} = $self->my_config('url');
$args{'input_drivers'} = ['URL'];
}
else {
$args{'file'} = $self->my_config('file');
}
my $file = EnsEMBL::Web::File::User->new(%args);
return [] unless $file->exists;
## Set style for VCF here, as other formats define it in different ways
my $adaptor;
if ($format =~ /vcf/i) {
$self->{'my_config'}->set('drawing_style', ['Feature::Variant']);
$self->{'my_config'}->set('height', 12);
$self->{'my_config'}->set('show_overlay', 1);
## Also create adaptor, so we can look up consequence in db
$adaptor = $self->{'config'}->hub->database('variation') ? $self->{'config'}->hub->database('variation')->get_VariationFeatureAdaptor : undef;
}
## Get settings from user interface
my ($colour, $y_min, $y_max);
if ($self->{'my_config'}{'data'}) {
$colour = $self->{'my_config'}{'data'}{'colour'};
$y_min = $self->{'my_config'}{'data'}{'y_min'} if looks_like_number($self->{'my_config'}{'data'}{'y_min'});
$y_max = $self->{'my_config'}{'data'}{'y_max'} if looks_like_number($self->{'my_config'}{'data'}{'y_max'});
}
my $iow = EnsEMBL::Web::IOWrapper::open($file,
'hub' => $hub,
'adaptor' => $adaptor,
'config_type' => $self->{'config'}{'type'},
'track' => $self->{'my_config'}{'id'},
);
if ($iow) {
## Override colourset based on format here, because we only want to have to do this in one place
my $colourset = $iow->colourset || 'userdata';
my $colours = $hub->species_defs->colour($colourset);
$self->{'my_config'}->set('colours', $colours);
$colour ||= $self->my_colour('default');
$self->{'my_config'}->set('colour', $colour);
my $extra_config = {
'strand_to_omit' => $strand_to_omit,
'display' => $self->{'display'},
'use_synonyms' => $hub->species_defs->USE_SEQREGION_SYNONYMS,
'colour' => $colour,
'colours' => $colours,
'y_min' => $y_min,
'y_max' => $y_max,
};
## Parse the file, filtering on the current slice
$data = $iow->create_tracks($container, $extra_config);
#use Data::Dumper; warn '>>> TRACKS '.Dumper($data);
} else {
$self->{'data'} = [];
return $self->errorTrack(sprintf 'Could not read file %s', $self->my_config('caption'));
}
#$self->{'config'}->add_to_legend($legend);
return $data;
}
1;
| 37.262295 | 147 | 0.565112 |
ed805099924e49a66fab7ec86b52e4d956217774 | 310 | al | Perl | code-orig/SourceDiscovery/jmatlab/MATLAB Component Runtime/v70/sys/perl/win32/lib/auto/POSIX/sin.al | usc-isi-i2/eidos | a40dd1b012bfa6e62756b289a1f955a877e5de3f | [
"MIT"
] | 1 | 2021-01-21T15:42:58.000Z | 2021-01-21T15:42:58.000Z | code-orig/SourceDiscovery/jmatlab/MATLAB Component Runtime/v70/sys/perl/win32/lib/auto/POSIX/sin.al | usc-isi-i2/eidos | a40dd1b012bfa6e62756b289a1f955a877e5de3f | [
"MIT"
] | null | null | null | code-orig/SourceDiscovery/jmatlab/MATLAB Component Runtime/v70/sys/perl/win32/lib/auto/POSIX/sin.al | usc-isi-i2/eidos | a40dd1b012bfa6e62756b289a1f955a877e5de3f | [
"MIT"
] | null | null | null | # NOTE: Derived from ..\..\lib\POSIX.pm.
# Changes made here will be lost when autosplit again.
# See AutoSplit.pm.
package POSIX;
#line 347 "..\..\lib\POSIX.pm (autosplit into ..\..\lib\auto/POSIX/sin.al)"
sub sin {
usage "sin(x)" if @_ != 1;
CORE::sin($_[0]);
}
# end of POSIX::sin
1;
| 22.142857 | 76 | 0.590323 |
ed5260ce38889b6942e6dfce8ef6f2325b48007b | 41,653 | t | Perl | t/oo/objects.t | winnit-myself/Wifie | 53284177b4f4107da5b7e2394cefccaa98194ec3 | [
"Artistic-2.0"
] | 312 | 2015-01-15T01:00:51.000Z | 2022-03-31T11:45:50.000Z | t/oo/objects.t | winnit-myself/Wifie | 53284177b4f4107da5b7e2394cefccaa98194ec3 | [
"Artistic-2.0"
] | 108 | 2015-01-01T18:24:22.000Z | 2022-02-25T16:53:52.000Z | t/oo/objects.t | winnit-myself/Wifie | 53284177b4f4107da5b7e2394cefccaa98194ec3 | [
"Artistic-2.0"
] | 80 | 2015-01-14T01:33:52.000Z | 2022-02-26T03:47:55.000Z | #!./parrot
# Copyright (C) 2001-2010, Parrot Foundation.
=head1 NAME
t/oo/objects.t - Objects
=head1 SYNOPSIS
% prove t/oo/objects.t
=head1 DESCRIPTION
Tests the object/class subsystem.
=cut
.sub main :main
.include 'test_more.pir'
.include "iglobals.pasm"
.include "interpinfo.pasm"
plan(191)
get_classname_from_class()
test_get_class()
test_isa()
does_scalar()
does_array()
new_object()
new_object__isa_test()
new_object__classname()
isa_subclass()
isa_subclass__objects()
test_addmethod()
test_addattribute()
addattribute_subclass()
addattribute_subclass__same_name()
set_and_get_object_attribs()
set_and_get_multiple_object_attribs()
attribute_values_are_specific_to_objects()
attribute_values_and_subclassing()
attribute_values_and_subclassing_2()
PMC_as_classes__overridden_mmd_methods()
typeof_class()
typeof_objects()
multiple_inheritance__with_attributes()
attributes_two_levels_of_inheritance()
class_op_test()
anon_subclass_has_no_name()
get_attrib_by_name()
get_attrib_by_name_subclass()
set_attrib_by_name_subclass()
PMC_as_classes()
PMC_as_classes__subclass()
PMC_as_classes__instantiate()
PMC_as_classes__methods()
PMC_as_classes__mmd_methods()
PMC_as_classes__derived_1()
PMC_as_classes__derived_2()
PMC_as_classes__derived_3()
subclassing_Class()
namespace_vs_name()
multiple_anon_classes()
subclassed_Integer_bug()
equality_of_subclassed_Integer()
short_name_attributes()
init_with_and_without_arg()
newclass_bracket_parsing()
verify_namespace_types()
verify_data_type()
new_keyed()
new_keyed_2()
new_keyed_3()
subclass_keyed()
test_class_name_multipart_name()
test_get_class_multipart_name()
isa_bug()
new_nested_ordering()
vtable_override_once_removed()
vtable_fails_for_subclasses_of_core_classes()
super___init_called_twice()
using_class_object_from_typeof_op_with_new()
setting_non_existent_attribute()
setting_non_existent_attribute_by_name()
getting_null_attribute()
getting_non_existent_attribute()
addparent_exceptions_1()
addparent_exceptions_2()
subclassing_a_non_existent_class()
anon_subclass_of_non_existent_class()
addattribute_duplicate()
wrong_way_to_create_new_objects()
attribute_values__subclassing_access_meths()
attribute_values__inherited_access_meths()
# END_OF_TESTS
.end
.sub get_classname_from_class
newclass $P1, "Foo5"
set $S0, $P1
is( $S0, "Foo5", "got classname Foo5" )
subclass $P2, $P1, "Bar5"
set $S1, $P2
is( $S1, "Bar5", "got subclass Bar5" )
subclass $P3, "Foo5", "Baz5"
set $S2, $P3
is( $S2, "Baz5", "got subclass Baz5" )
.end
.sub test_get_class
newclass $P1, "Foo6"
get_class $P2, "Foo6"
set $S2, $P2
is( $S2, "Foo6", 'get_class for Foo6' )
subclass $P3, $P1, "FooBar6"
get_class $P4, "FooBar6"
set $S4, $P4
is( $S4, 'FooBar6', 'get_class for FooBar6' )
get_class $P3, "NoSuch6"
isnull $I0, $P3
ok( $I0, "no class for 'NoSuch6'" )
.end
.sub test_isa
new $P1, ['Boolean']
isa $I0, $P1, "Boolean"
is( $I0, 1, 'Boolean isa Boolean' )
isa $I0, $P1, "Bool"
is( $I0, 0, 'Boolean !isa Bool' )
isa $I0, $P1, "scalar"
is( $I0, 1, 'Boolean isa scalar' )
isa $I0, $P1, "calar"
is( $I0, 0, 'Boolean !isa calar' )
isa $I0, $P1, " "
is( $I0, 0, 'Boolean !isa " "' )
isa $I0, $P1, ""
is( $I0, 0, 'Boolean !isa ""' )
null $S0
isa $I0, $P1, $S0
is( $I0, 0, 'Boolean !isa null $S0' )
set $S0, "scalar"
isa $I0, $P1, $S0
is( $I0, 1, 'Boolean isa scalar $S0' )
.end
.sub does_scalar
new $P1, ['Boolean']
does $I0, $P1, "Boolean"
is( $I0, 0, 'Boolean !does Boolean' )
does $I0, $P1, "Bool"
is( $I0, 0, 'Boolean !does Bool' )
does $I0, $P1, "scalar"
is( $I0, 1, 'Boolean does scalar' )
.end
.sub does_array
new $P1, ['OrderedHash']
does $I0, $P1, "Boolean"
is( $I0, 0, 'OrderedHash !does Boolean' )
does $I0, $P1, "Bool"
is( $I0, 0, 'OrderedHash !does Bool' )
does $I0, $P1, "hash"
is( $I0, 1, 'OrderedHash does hash' )
does $I0, $P1, "array"
is( $I0, 1, 'OrderedHash does array' )
.end
.sub new_object
newclass $P1, "Foo7"
new $P2, "Foo7"
ok( 1, 'created new object from Foo7 class' )
.end
.sub new_object__isa_test
newclass $P1, "Foo8"
new $P2, $P1
ok( 1, 'created new object from Foo8 class' )
isa $I0, $P2, "Foo8"
ok( $I0, 'new object isa Foo8' )
.end
.sub new_object__classname
newclass $P1, "Foo9"
new $P2, $P1
set $S0, $P1 # class
is( $S0, "Foo9", 'new object from Foo9 class as a string is Foo9' )
typeof $S0, $P2 # object
is( $S0, 'Foo9', 'typeof obj is Foo9' )
class $P3, $P1
set $S0, $P1 # class
is( $S0, 'Foo9', 'class of obj is Foo9' )
typeof $S0, $P2 # object
is( $S0, 'Foo9', 'typeof obj is Foo9' )
.end
.sub isa_subclass
newclass $P1, "Foo10"
subclass $P2, $P1, "Bar10"
isa_ok( $P2, "Foo10", 'newclass isa Foo10' )
isa_ok( $P2, "Bar10", 'new subclass isa Bar10' )
isa_ok( $P2, "Foo10", 'new subclass isa parent' )
isa_ok( $P2, "Class", 'new subclass isa Class' )
isa $I0, $P2, "Object"
is( $I0, 0, 'new subclass !isa Object' )
.end
.sub isa_subclass__objects
newclass $P3, "Foo30"
subclass $P4, $P3, "Bar30"
$P1 = $P3.'new'()
$P2 = $P4.'new'()
isa_ok( $P1, "Foo30", 'obj isa its class' )
isa_ok( $P2, "Bar30", 'obj isa its class' )
isa_ok( $P2, "Foo30", 'obj isa its parent class' )
isa_ok( $P2, "Object", 'obj isa Object' )
isa_ok( $P2, "Class", 'obj isa Class' )
.end
.sub test_addmethod
newclass $P0, 'Foo31'
$P2 = get_hll_global 'sayFoo31'
# add a method BEFORE creating a Foo object
addmethod $P0, 'foo31', $P2
$P1 = new 'Foo31'
$P1.'foo31'()
# get a method from some other namespace
$P2 = get_hll_global ['Bar31'], 'sayBar31'
# add a method AFTER creating the object
addmethod $P0, 'bar31', $P2
$P1.'bar31'()
.end
.sub sayFoo31
ok( 1, 'called method added before creating obj' )
.end
.namespace ['Bar31']
.sub sayBar31
ok( 1, 'called method added after created obj' )
.end
.namespace [] # Reset to root namespace for next test
.sub test_addattribute
newclass $P1, "Foo11"
addattribute $P1, "foo_i"
ok( 1, 'addattribute did not blow up' )
set $S0, $P1
is( $S0, "Foo11", '$P1 is still the same class as PMC' )
# Check that we can add multiple attributes
set $I0, 0
l1:
set $S0, $I0
addattribute $P1, $S0
inc $I0
lt $I0, 1000, l1
ok( 1, 'addattribute 1000x without blow up' )
.end
.sub addattribute_subclass
newclass $P1, "Foo12"
addattribute $P1, "foo_i"
ok( 1, 'addattribute to Foo12' )
subclass $P2, $P1, "Bar12"
addattribute $P2, "bar_i"
ok( 1, 'addattribute to subclass of Foo12' )
.end
.sub addattribute_subclass__same_name
newclass $P1, "Foo32"
addattribute $P1, "i"
addattribute $P1, "j"
subclass $P2, $P1, "Bar32"
addattribute $P2, "j"
addattribute $P2, "k"
ok( 1, 'created class and subclass and added attributes' )
.local pmc o
o = $P2.'new'()
$P0 = getattribute o, 'i'
is( $P0, 'Foo32.i', 'parent attrib initialized in init' )
$P0 = getattribute o, ['Foo32'], 'j'
is( $P0, 'Foo32.j', 'parent attrib initialized in init' )
$P0 = getattribute o, ['Bar32'], 'j'
is( $P0, 'Bar32.j', 'subclass attrib initialized in init' )
$P0 = getattribute o, 'k'
is( $P0, 'Bar32.k', 'subclass attrib initialized in init' )
$P0 = getattribute o, 'i'
is( $P0, 'Foo32.i', 'parent attrib init-ed' )
$P0 = getattribute o, ['Foo32'], "j"
is( $P0, 'Foo32.j', 'parent attrib init-ed' )
$P0 = getattribute o, 'j'
is( $P0, 'Bar32.j', 'subclass attrib returned over parent' )
$P0 = getattribute o, 'k'
is( $P0, 'Bar32.k', 'subclass attrib init-ed' )
.end
.namespace ['Bar32']
.sub init :vtable :method
$P0 = new ['String']
$P0 = 'Foo32.i'
setattribute self, ['Foo32'], "i", $P0
$P0 = new ['String']
$P0 = 'Foo32.j'
setattribute self, ["Foo32"], "j", $P0
$P0 = new ['String']
$P0 = 'Bar32.j'
setattribute self, ["Bar32"], "j", $P0
$P0 = new ['String']
$P0 = 'Bar32.k'
setattribute self, ["Bar32"], "k", $P0
.end
.namespace [] # Reset to root namespace for next test
.sub set_and_get_object_attribs
newclass $P1, "Foo13"
addattribute $P1, "i"
new $P2, $P1
new $P3, ['Integer']
set $P3, 1024
setattribute $P2, "i", $P3
new $P4, ['Integer']
getattribute $P4, $P2, "i"
is( $P4, 1024, 'set/get Integer attribute' )
.end
.sub set_and_get_multiple_object_attribs
newclass $P1, "Foo14"
addattribute $P1, "i"
addattribute $P1, "j"
new $P2, "Foo14"
new $P3, ['Integer']
set $P3, 4201
new $P4, ['Hash']
set $P4["Key"], "Value"
setattribute $P2, "i", $P3
setattribute $P2, "j", $P4
getattribute $P5, $P2, "i"
is( $P5, '4201', 'set/get Integer attribute' )
getattribute $P6, $P2, "j"
set $S0, $P6["Key"]
is( $S0, 'Value', 'set/get Hash attribute on same obj' )
.end
.sub attribute_values_are_specific_to_objects
newclass $P1, "Foo15"
addattribute $P1, "i"
new $P2, $P1
new $P3, $P1
new $P4, ['Integer']
set $P4, 100
setattribute $P2, "i", $P4
new $P5, ['String']
set $P5, "One hundred"
setattribute $P3, "i", $P5
getattribute $P6, $P2, "i"
is( $P6, 100, 'attribute value on 1st object is specific to obj' )
getattribute $P6, $P3, "i"
is( $P6, 'One hundred', 'attribute value on 2nd obj is specific to obj' )
.end
.sub attribute_values_and_subclassing
newclass $P1, "Foo16"
addattribute $P1, "i"
addattribute $P1, "j"
subclass $P2, $P1, "Bar16"
addattribute $P2, "k"
addattribute $P2, "l"
new $P2, "Bar16"
new $P3, "Bar16"
# Note that setattribute holds the actual PMC, not a copy, so
# in this test both attributes get the PMC from $P4, and should
# both have the same value, despite the C<inc>.
new $P4, ['Integer']
set $P4, 10
setattribute $P2, "i", $P4
inc $P4
setattribute $P2, "j", $P4
new $P5, ['Integer']
set $P5, 100
setattribute $P3, "i", $P5
inc $P5
setattribute $P3, "j", $P5
getattribute $P6, $P2, "i"
is( $P6, 11, 'setattrib with a PMC holds actual PMC not copy' )
getattribute $P6, $P2, "j"
is( $P6, 11, '...so changes to the PMC appear through the attrib' )
getattribute $P6, $P3, "i"
is( $P6, 101, '...and second test on new objects' )
getattribute $P6, $P3, "j"
is( $P6, 101, '...should have same result' )
.end
.sub attribute_values_and_subclassing_2
newclass $P1, "Foo17"
# must add attributes before object instantiation
addattribute $P1, ".i"
addattribute $P1, ".j"
subclass $P2, $P1, "Bar17"
addattribute $P2, ".k"
addattribute $P2, ".l"
# subclass is preferred for the SI case over
# newclass $P2, "Bar"
# addattrib ...
# addparent $P2, $P1
# which is suitable for adding multiple parents to one class
# instantiate a Bar object
new $P3, "Bar17"
# Set the attribute values
new $P10, ['String'] # set attribute values
set $P10, "i" # attribute slots have reference semantics
setattribute $P3, ".i", $P10 # so always put new PMCs in
# if you have unique values
new $P10, ['String']
set $P10, "j"
setattribute $P3, ".j", $P10
new $P10, ['String']
set $P10, "k"
setattribute $P3, ".k", $P10
new $P10, ['String']
set $P10, "l"
setattribute $P3, ".l", $P10
# retrieve attribs
getattribute $P11, $P3, ".i"
is( $P11, "i", 'string attribute get/set on parent' )
getattribute $P11, $P3, ".j"
is( $P11, "j", 'string attribute get/set on parent' )
getattribute $P11, $P3, ".k"
is( $P11, "k", 'string attribute get/set on subclass' )
getattribute $P11, $P3, ".l"
is( $P11, "l", 'string attribute get/set on subclass' )
.end
.sub PMC_as_classes__overridden_mmd_methods
.local pmc myint, i, j, k
get_class $P0, "Integer"
subclass myint, $P0, "MyInt1"
i = new 'MyInt1'
j = new 'MyInt1'
k = new 'MyInt1'
i = 6
j = 7
k = i + j
is( k, 13, 'added two MyInt1' )
j = new ['Integer']
j = 100
k = i + j
is( k, 106, 'added MyInt1 and an Integer' )
.end
.namespace ["MyInt1"]
.sub add :multi(MyInt1, MyInt1, MyInt1)
.param pmc self
.param pmc right
.param pmc dest
ok( 1, 'in the add method' )
$P0 = getattribute self, ['Integer'], "proxy"
$I0 = $P0
$I1 = right
$I2 = $I0 + $I1
dest = $I2
.return(dest)
.end
.namespace [] # Reset to root namespace for next test
.sub typeof_class
newclass $P0, "Foo21"
typeof $S0, $P0
is( $S0, "Class", 'typeof for a Class PMC is "Class"' )
.end
.sub typeof_objects
newclass $P0, "A"
newclass $P1, "B"
new $P0, ['A']
new $P1, ['B']
typeof $S0, $P0
typeof $S1, $P1
is( $S0, 'A', 'typeof object of class A is "A"' )
is( $S1, 'B', 'typeof object of class B is "B"' )
.end
.sub multiple_inheritance__with_attributes
newclass $P1, "Star"
addattribute $P1, "Spectral Type"
newclass $P2, "Company"
addattribute $P2, "Annual Profit"
subclass $P3, $P1, "Sun"
addparent $P3, $P2
new $P4, ['Sun']
new $P5, ['String']
set $P5, "G"
setattribute $P4, "Spectral Type", $P5
new $P6, ['String']
set $P6, "$100,000,000"
setattribute $P4, "Annual Profit", $P6
getattribute $P7, $P4, "Spectral Type"
is( $P7, 'G', 'direct parents attribute' )
getattribute $P8, $P4, "Annual Profit"
is( $P8, '$100,000,000', "addparent's attribute" )
.end
.sub attributes_two_levels_of_inheritance
newclass $P0, "Astronomical Object"
addattribute $P0, "Location"
subclass $P1, $P0, "Star2"
addattribute $P1, "Spectral Type"
newclass $P2, "Sun2"
addparent $P2, $P1
addparent $P2, $P0
new $P4, "Sun2"
new $P5, ['String']
set $P5, "Taurus"
setattribute $P4, "Location", $P5
getattribute $P6, $P4, "Location"
is( $P6, 'Taurus', 'attributes with two levels of inheritance' )
.end
.sub class_op_test
newclass $P0, "City1"
new $P1, "City1"
class $P2, $P1
set $S0, $P2
is( $S0, 'City1', 'class op works' )
.end
.sub anon_subclass_has_no_name
newclass $P0, "City2"
subclass $P1, $P0
set $S0, $P1
is( $S0, '', 'anonymous subclass has no name' )
.end
.sub get_attrib_by_name
newclass $P1, "Foo18"
addattribute $P1, "i"
new $P2, "Foo18"
new $P3, ['String']
set $P3, "ok"
setattribute $P2, "i", $P3
getattribute $P4, $P2, ["Foo18"], "i"
is( $P4, 'ok', 'get attrib by name' )
.end
.sub get_attrib_by_name_subclass
newclass $P0, "Bar19"
addattribute $P0, "j"
subclass $P1, $P0, "Foo19"
addattribute $P1, "i"
new $P2, "Foo19"
new $P3, ['String']
set $P3, "foo i"
setattribute $P2, "i", $P3
new $P3, ['String']
set $P3, "bar j"
setattribute $P2, "j", $P3
getattribute $P4, $P2, ["Foo19"], "i"
is( $P4, 'foo i', 'attribute from subclass get by name' )
getattribute $P4, $P2, ["Bar19"], "j"
is( $P4, 'bar j', 'attribute from parent class get by name' )
.end
.sub set_attrib_by_name_subclass
newclass $P0, "Bar20"
addattribute $P0, "j"
subclass $P1, $P0, "Foo20"
addattribute $P1, "i"
new $P2, "Foo20"
new $P3, ['String']
set $P3, "foo i"
setattribute $P2, ["Foo20"], "i", $P3
new $P3, ['String']
set $P3, "bar j"
setattribute $P2, ["Bar20"], "j", $P3
getattribute $P4, $P2, "i"
is( $P4, 'foo i', 'attribute from subclass set by name' )
getattribute $P4, $P2, "j"
is( $P4, 'bar j', 'attribute from parent class set by name' )
.end
.sub PMC_as_classes
get_class $P0, "Integer"
ok( 1, "get_class of Integer didn't croak" )
get_class $P0, "Integer"
ok( 1, "get_class of Integer didn't croak second time" )
typeof $S0, $P0
is( $S0, 'PMCProxy', 'typeof PMCProxy' )
.end
.sub PMC_as_classes__subclass
.local pmc MyInt3
get_class $P0, "Integer"
ok( 1, "get_class on Integer didn't blow up" )
subclass MyInt3, $P0, "MyInt3"
ok( 1, "subclassing didn't blow up" )
$S0 = typeof MyInt3
is( $S0, 'Class', 'new subclass is typeof Class' )
$I0 = isa MyInt3, "MyInt3"
ok( $I0, 'new subclass isa MyInt' )
$I0 = isa MyInt3, "Integer"
ok( $I0, 'new subclass isa parent class' )
.end
.sub PMC_as_classes__instantiate
.local pmc MyInt4
get_class $P0, "Integer"
ok( 1, 'able to get_class of Integer' )
subclass MyInt4, $P0, "MyInt4"
addattribute MyInt4, ".i"
ok( 1, 'able to addattribute to subclass' )
.local pmc i
i = new "MyInt4"
ok( 1, 'able to instantiate obj of subclass w/ attribute' )
.end
.sub PMC_as_classes__methods
.local pmc MyInt5
get_class $P0, "Integer"
subclass MyInt5, $P0, "MyInt5"
addattribute MyInt5, "intval"
.local pmc i, i2
i = new "MyInt5"
i2 = new ['Integer']
i2 = 43
i = 42 # set_integer is inherited from Integer
ok( 1, 'able to assign int to MyInt' )
$I0 = i # get_integer is overridden below
is( $I0, 42, 'get_integer is overridden for MyInt5' )
$S0 = i # get_string is overridden below
is( $S0, 'MyInt5(42)', 'get_string is overridden for MyInt5' )
.end
.namespace ["MyInt5"]
.sub set_integer_native :vtable :method
.param int new_value
$P1 = new ['Integer']
$P1 = new_value
setattribute self, "intval", $P1
.end
.sub get_integer :vtable :method
$P0 = getattribute self, "intval"
$I0 = $P0
.return ($I0)
.end
.sub get_string :vtable :method
$P0 = getattribute self, "intval"
$I0 = $P0
$S1 = $I0
$S0 = "MyInt5("
$S0 .= $S1
$S0 .= ")"
.return ($S0)
.end
.namespace [] # Reset to root namespace for next test
.sub PMC_as_classes__mmd_methods
.local pmc MyInt6
get_class $P0, "Integer"
subclass MyInt6, $P0, "MyInt6"
.local pmc i
.local pmc j
.local pmc k
i = new "MyInt6"
j = new "MyInt6"
k = new "MyInt6"
i = 6
j = 7
k = i * j
$I0 = k
is( $I0, 42, 'MyInt6 defaults to Integer class for mult' )
$S0 = k # get_string is overridden below
is( $S0, 'MyInt6(42)', 'get_string is overridden for MyInt6' )
.end
.namespace ["MyInt6"]
.sub get_string :vtable :method
$I0 = self # get_integer is not overridden
$S1 = $I0
$S0 = "MyInt6("
$S0 .= $S1
$S0 .= ")"
.return ($S0)
.end
.namespace [] # Reset to root namespace for next test
.sub PMC_as_classes__derived_1
.local pmc MyInt8
.local pmc MyInt8_2
get_class $P0, "Integer"
subclass MyInt8, $P0, "MyInt8"
addattribute MyInt8, 'intval'
get_class $P1, "MyInt8"
subclass MyInt8_2, $P1, "MyInt8_2"
.local pmc i
i = new "MyInt8_2"
$I0 = isa i, "Integer"
ok( $I0, 'obj isa grandparent (Integer)' )
$I0 = isa i, "MyInt8"
ok( $I0, 'obj isa parent (MyInt8)' )
$I0 = isa i, "MyInt8_2"
ok( $I0, 'obj isa its class (MyInt8_2)' )
i = 42 # set_integer is overridden below
$I0 = i # get_integer is overridden below
is( $I0, 42, 'set/get_integer overridden' )
$S0 = i # get_string is overridden below
is( $S0, 'MyInt8_2(42)', 'set/get_string overridden' )
.end
.namespace ["MyInt8"]
.sub 'set_integer_native' :vtable :method
.param int val
$P1 = new ['Integer']
$P1 = val
setattribute self, "intval", $P1
.return ()
.end
.sub get_integer :vtable :method
$P0 = getattribute self, 'intval'
$I0 = $P0
.return ($I0)
.end
.sub get_string :vtable :method
$P0 = getattribute self, 'intval'
$I0 = $P0
$S1 = $I0
$S0 = typeof self
$S0 .= "("
$S0 .= $S1
$S0 .= ")"
.return ($S0)
.end
.namespace [] # Reset to root namespace for next test
.sub PMC_as_classes__derived_2
.local pmc MyInt9
.local pmc MyInt9_2
get_class $P0, "Integer"
subclass MyInt9, $P0, "MyInt9"
addattribute MyInt9, 'intval'
get_class $P1, "MyInt9"
subclass MyInt9_2, $P1, "MyInt9_2"
.local pmc i
i = new "MyInt9_2"
$I0 = isa i, "Integer"
ok( $I0, 'obj isa grandparent (Integer)' )
$I0 = isa i, "MyInt9"
ok( $I0, 'obj isa parent (MyInt9)' )
$I0 = isa i, "MyInt9_2"
ok( $I0, 'obj isa its class (MyInt9_2)' )
i = 42 # set_integer is overridden below
$I0 = i # get_integer is overridden below
is( $I0, 43, 'set/get_integer overridden' )
$S0 = i # get_string is overridden below
is( $S0, 'MyInt9_2(42)', 'set/get_string overridden' )
.end
.namespace ["MyInt9_2"]
# subclassing methods from MyInt9 is ok
# this one changes the value a bit
.sub get_integer :vtable :method
$P0 = getattribute self, 'intval'
$I0 = $P0
inc $I0 # <<<<<
.return ($I0)
.end
.namespace ["MyInt9"]
.sub 'set_integer_native' :vtable :method
.param int val
$P1 = new ['Integer']
$P1 = val
setattribute self, "intval", $P1
.return ()
.end
.sub get_integer :vtable :method
$P0 = getattribute self, 'intval'
$I0 = $P0
.return ($I0)
.end
.sub get_string :vtable :method
$P0 = getattribute self, 'intval'
$I0 = $P0
$S1 = $I0
$S0 = typeof self
$S0 .= "("
$S0 .= $S1
$S0 .= ")"
.return ($S0)
.end
.namespace [] # Reset to root namespace for next test
.sub PMC_as_classes__derived_3
.local pmc MyInt10
.local pmc MyInt10_2
get_class $P0, "Integer"
subclass MyInt10, $P0, "MyInt10"
addattribute MyInt10, 'intval'
get_class $P1, "MyInt10"
subclass MyInt10_2, $P1, "MyInt10_2"
.local pmc i
i = new "MyInt10_2"
$I0 = isa i, "Integer"
ok( $I0, 'obj isa grandparent (Integer)' )
$I0 = isa i, "MyInt10"
ok( $I0, 'obj isa parent (MyInt10)' )
$I0 = isa i, "MyInt10_2"
ok( $I0, 'obj isa its class (MyInt102)' )
i = 42 # set_integer is overridden below
$I0 = i # get_integer is overridden below
is( $I0, 42, 'set/get_integer overridden' )
$S0 = i # get_string is overridden below
is( $S0, 'MyInt10_2(42)', 'set/get_string overridden' )
.end
.namespace ["MyInt10_2"]
.sub get_integer :vtable :method
$P0 = getattribute self, 'intval'
$I0 = $P0
.return ($I0)
.end
.sub get_string :vtable :method
$P0 = getattribute self, 'intval'
$I0 = $P0
$S1 = $I0
$S0 = typeof self
$S0 .= "("
$S0 .= $S1
$S0 .= ")"
.return ($S0)
.end
.namespace ['MyInt10']
.sub 'set_integer_native' :vtable :method
.param int val
$P1 = new ['Integer']
$P1 = val
setattribute self, "intval", $P1
.return ()
.end
.namespace [] # Reset to root namespace for next test
.sub subclassing_Class
.local pmc cl
.local pmc parent
parent = get_class "Class"
cl = subclass parent, "Foo33"
ok( 1, 'able to subclass Class' )
.local pmc o
o = new "Foo33"
ok( 1, 'able to instantiate subclass of Class' )
$S0 = typeof o
is( $S0, 'Foo33', 'object returns correct class' )
.end
.sub namespace_vs_name
.local pmc o, cl, f
newclass cl, "Foo34"
o = new "Foo34"
is( o, 'Foo34::get_string', 'found Foo34 namespace' )
o = Foo34()
is( o, 'Foo34', 'found global Foo34' )
f = get_global "Foo34"
o = f()
is( o, 'Foo34', 'found global Foo34 explicitly' )
f = get_global ["Foo34"], "Foo34"
o = f()
is( o, 'Foo34::Foo34', 'found method in Foo34 namespace' )
.end
.sub Foo34
.return("Foo34")
.end
.namespace [ "Foo34" ]
.sub get_string :vtable :method
.return("Foo34::get_string")
.end
.sub Foo34
.return("Foo34::Foo34")
.end
.namespace [] # Reset to root namespace for next test
.sub multiple_anon_classes
newclass $P0, "City3"
subclass $P1, $P0
newclass $P2, "State3"
subclass $P3, $P2
ok( 1, "multiple anon classes didn't croak (bug #33103)" )
.end
.sub subclassed_Integer_bug
.local pmc class
.local pmc a
.local pmc b
subclass class, "Integer", "LispInteger1"
a = new "LispInteger1"
b = new "LispInteger1"
a = 1
b = 1
set $S0, a
is( $S0, '1', 'subclassed Integer is 1' )
set $S0, b
is( $S0, '1', 'subclassed Integer is 1' )
a = a * b
set $S0, a
is( $S0, '1', 'multiply and reassign to subclassed Integer is 1' )
.end
.sub equality_of_subclassed_Integer
.local pmc class
class = subclass "Integer", "LispInteger2"
.local pmc a
a = new 'LispInteger2'
a = 123
.local pmc b
b = new 'LispInteger2'
b = 123
$I0 = a == b
ok( $I0, '123 is equal to 123' )
.end
.sub short_name_attributes
newclass $P1, "Foo22"
addattribute $P1, "i"
addattribute $P1, "j"
subclass $P2, $P1, "Bar22"
addattribute $P2, "k"
addattribute $P2, "l"
new $P2, "Bar22"
# set a bunch of attribs
new $P4, ['Integer']
set $P4, 10
setattribute $P2, "i", $P4
new $P4, ['Integer']
set $P4, 11
setattribute $P2, "j", $P4
new $P4, ['Integer']
set $P4, 20
setattribute $P2, "k", $P4
new $P4, ['Integer']
set $P4, 21
setattribute $P2, "l", $P4
getattribute $P6, $P2, "i"
is( $P6, 10, '"i" getattribute on parent class attrib' )
getattribute $P6, $P2, "j"
is( $P6, 11, '"j" getattribute on parent class attrib' )
getattribute $P6, $P2, "k"
is( $P6, 20, '"k" getattribute on subclass attrib' )
getattribute $P6, $P2, "l"
is( $P6, 21, '"l" getattribute on subclass attrib' )
getattribute $P6, $P2, ["Foo22"], "i"
is( $P6, 10, '["Foo22"], "i" getattribute on parent class attrib' )
getattribute $P6, $P2, ["Bar22"], "k"
is( $P6, 20, '["Bar22"], "k" getattribute on subclass attrib' )
.end
.sub init_with_and_without_arg
.local pmc cl, o, h, a
cl = newclass "Foo35"
addattribute cl, "a"
o = cl.'new'()
a = getattribute o, "a"
is( a, 'ok 1', 'init without an arg' )
h = new ['Hash']
$P0 = new ['String']
$P0 = "ok 2"
h['a'] = $P0
o = new cl, h
a = getattribute o, "a"
is( a, 'ok 2', 'init with an arg' )
.end
.namespace ["Foo35"]
.sub init_pmc :vtable :method
.param pmc args
$P0 = args['a']
setattribute self, 'a', $P0
.return()
.end
.sub init :vtable :method
$P0 = new ['String']
$P0 = "ok 1"
setattribute self, 'a', $P0
.end
.namespace [] # Reset to root namespace for next test
.sub newclass_bracket_parsing
newclass $P0, ['Foo23';'Bar23']
ok( 1, 'newclass created with brackets' )
.end
.sub verify_namespace_types
newclass $P0, ['Foo24';'Bar24']
getinterp $P0
set $P1, $P0[.IGLOBALS_CLASSNAME_HASH]
typeof $S0, $P1
is( $S0, 'NameSpace', 'namespace verified' )
set $P2, $P1['Foo24']
typeof $S0, $P2
is( $S0, 'NameSpace', 'namespace verified' )
.end
.sub verify_data_type
newclass $P0, ['Foo25';'Bar25']
getinterp $P0
set $P1, $P0[.IGLOBALS_CLASSNAME_HASH]
set $P2, $P1['Foo25']
set $P3, $P2['Bar25']
set $I0, $P3
isgt $I0, $I0, 0
ok( $I0, 'verified datatype > 0' )
.end
# Puts init in a namespace
.sub new_keyed
.local pmc cl, o, p
cl = newclass ['Foo36';'Bar36']
addattribute cl, "init_check"
o = cl.'new'()
ok( 1, 'obj successfully created' )
p = getattribute o, "init_check"
is( p, 999, "overridden init called")
.end
.namespace ['Foo36';'Bar36']
.sub init :vtable :method
.local pmc p
p = new ['Integer']
p = 999
setattribute self, "init_check", p
.end
.namespace [] # revert to root for next test
.sub new_keyed_2
.local pmc c1, c2, o1, o2
c1 = newclass ['Foo37';'Bar37']
c2 = newclass ['Foo37';'Fuz37']
o1 = c1.'new'()
o2 = c2.'new'()
ok( 1, 'objects created successfully' )
.end
.namespace ['Foo37';'Bar37']
.sub init :vtable :method
ok( 1, '__init Bar37' )
.end
.namespace ['Foo37';'Fuz37']
.sub init :vtable :method
ok( 1, '__init Fuz37' )
.end
.namespace [] # revert to root for next test
.sub new_keyed_3
.local pmc c1, c2, c3, o1, o2, o3
c1 = newclass ['Foo38';'Bar38']
c2 = newclass ['Foo38';'Buz38']
c3 = newclass 'Foo38'
o1 = new ['Foo38';'Bar38']
o2 = new ['Foo38';'Buz38']
o3 = new 'Foo38'
ok( 1, 'objects created successfully' )
.end
.namespace ['Foo38';'Bar38']
.sub init :vtable :method
ok( 1, '__init Bar38' )
.end
.namespace ['Foo38';'Buz38']
.sub init :vtable :method
ok( 1, '__init Buz38' )
.end
.namespace ['Foo38']
.sub init :vtable :method
ok( 1, '__init Foo38' )
.end
.namespace [] # revert to root for next test
.sub subclass_keyed
.local pmc base, o1, o2
base = subclass 'Hash', ['Perl6-3'; 'PAST'; 'Node']
addattribute base, '$.source' # original source
addattribute base, '$.pos' # offset position
$P0 = subclass base, ['Perl6-3'; 'PAST'; 'Sub']
$P0 = subclass base, ['Perl6-3'; 'PAST'; 'Stmt']
ok( 1, 'ok 1\n' )
o1 = new ['Perl6-3'; 'PAST'; 'Sub']
o2 = new ['Perl6-3'; 'PAST'; 'Stmt']
ok( 1, 'objects created successfully' )
.end
.namespace ['Perl6-3'; 'PAST'; 'Stmt']
.sub init :vtable :method
ok( 1, '__init Stmt' )
.end
.namespace ['Perl6-3'; 'PAST'; 'Sub']
.sub init :vtable :method
ok( 1, '__init Sub' )
.end
.namespace [] # revert to root for next test
.sub test_class_name_multipart_name
.local pmc base, o1
base = subclass 'Hash', ['Perl6'; 'PAST'; 'Node']
o1 = new base
$S0 = typeof o1
is( $S0, "Perl6;PAST;Node", "typeof returns object's class name" )
.end
.sub test_get_class_multipart_name
.local pmc base, o1
base = subclass 'Hash', ['Perl6a'; 'PAST'; 'Node']
$P0 = get_class ['Perl6a'; 'PAST'; 'Node']
o1 = new $P0
$S0 = typeof o1
is( $S0, 'Perl6a;PAST;Node', 'typeof returns objects created from get_class' )
.end
.sub isa_bug
.local pmc base, o1, o2
base = subclass 'Hash', ['Perl6b'; 'PAST'; 'Node']
$P0 = new [ 'Perl6b'; 'PAST'; 'Node' ]
$I0 = isa $P0, [ 'Perl6b'; 'PAST'; 'Node']
is( $I0, 1, 'obj isa the full class name' )
$I0 = isa $P0, 'Hash'
is( $I0, 1, 'obj isa the parent class' )
$I0 = isa $P0, 'Perl6b'
is( $I0, 0, 'obj !isa the first part of the class name' )
.end
.sub new_nested_ordering
.local pmc c1, c2, o
c1 = newclass ['Foo39']
c2 = newclass ['Foo39';'Bar39']
o = c2.'new'()
ok( 1, 'objects created successfully' )
.end
.namespace ['Foo39']
.sub init :vtable :method
ok( 0, '__init Foo39' ) # shouldn't be called
.end
.namespace ['Foo39';'Bar39']
.sub init :vtable :method
ok( 1, '__init Bar39' ) # should be called
.end
.namespace [] # revert to root for next test
.sub vtable_override_once_removed
.local pmc base
$P0 = get_class 'Integer'
base = subclass $P0, 'Foo40' # create subclass 'Foo40'
addattribute base, '@!capt'
$P0 = subclass 'Foo40', 'Bar40' # create subclass 'Bar40'
$P1 = new 'Bar40' # create an instance of 'Bar40'
$S1 = $P1 # get its string representation
is( $S1, 'ok bar', 'get_string overridden' )
.end
.namespace [ 'Bar40' ]
.sub 'get_string' :vtable :method
$S0 = 'ok bar'
.return ($S0)
.end
.namespace [] # revert to root for next test
.sub vtable_fails_for_subclasses_of_core_classes
$P0 = subclass 'Hash', 'Foo41'
$P0 = subclass 'Hash', 'Bar41'
$P1 = new 'Foo41'
$S1 = $P1
is( $S1, 'Hello world', 'get_string :vtable :method' )
$P1 = new 'Bar41'
$S1 = $P1
is( $S1, 'Hello world', 'get_string :method :vtable' )
.end
.namespace [ 'Foo41' ]
.sub 'get_string' :vtable :method
.return('Hello world')
.end
.namespace [ 'Bar41' ]
.sub 'get_string' :method :vtable
.return('Hello world')
.end
.namespace [] # revert to root for next test
.sub super___init_called_twice
$P0 = newclass 'Foo42'
$P1 = subclass $P0, 'Bar42'
addattribute $P1, 'i'
$P2 = $P1.'new'()
.end
.namespace [ 'Foo42' ]
.sub 'init' :vtable :method
$P0 = getattribute self, 'i'
isnull $I1, $P0
ok( $I1, 'should be null' )
$P1 = new ['Integer']
setattribute self, "i", $P1 # i won't be null if init called again
.return ()
.end
.namespace [] # revert to root for next test
.sub using_class_object_from_typeof_op_with_new
$P0 = newclass [ "Monkey" ; "Banana" ]
$P0 = $P0.'new'()
$S0 = $P0."ook"()
is( $S0, "Ook!", 'obj created from .new() class method' )
$P2 = typeof $P0
$P3 = new $P2
$S0 = $P3."ook"()
is( $S0, "Ook!", 'obj created from "new" called on result of typeof' )
.end
.namespace [ "Monkey" ; "Banana" ]
.sub ook :method
$S1 = "Ook!"
.return ($S1)
.end
.namespace [] # revert to root for next test
.macro exception_is ( M )
.local pmc exception
.local string message
.get_results (exception)
message = exception['message']
is( message, .M, .M )
.endm
.sub setting_non_existent_attribute
newclass $P1, "Foo45"
new $P2, $P1
new $P3, ['Integer']
push_eh handler
setattribute $P2, "bar", $P3
pop_eh
ok(0, "'No such attribute' exception not thrown")
goto end
handler:
.exception_is( "No such attribute 'bar'" )
end:
.end
.sub setting_non_existent_attribute_by_name
newclass $P1, "Foo47"
new $P2, $P1
new $P3, ['Integer']
push_eh handler
setattribute $P2, ["Foo47"], "no_such", $P3
pop_eh
ok(0, "'No such attribute' exception not thrown")
goto end
handler:
.exception_is( "No such attribute 'no_such' in class 'Foo47'" )
end:
.end
.sub getting_null_attribute
newclass $P1, "Foo51"
addattribute $P1, "i"
new $P2, "Foo51"
getattribute $P3, $P2, "i"
isnull $I0, $P3
is($I0, 1, "null attribute is null")
.end
.sub getting_non_existent_attribute
newclass $P1, "Foo52"
$P2 = $P1.'new'()
push_eh handler
getattribute $P3, $P2, "bar"
pop_eh
ok(0, "'No such attribute' exception not thrown")
goto end
handler:
.exception_is( "No such attribute 'bar'" )
end:
.end
.sub addparent_exceptions_1
newclass $P0, "Astronomical Object 2"
new $P1, ['String']
set $P1, "Not a class"
push_eh handler
addparent $P0, $P1
pop_eh
ok(0, "'Parent isn\'t a Class' exception not thrown")
goto end
handler:
.exception_is( "Parent 'Not a class' of 'Astronomical Object 2' isn't a Class" )
end:
.end
.sub addparent_exceptions_2
new $P0, ['Hash']
newclass $P1, "Trashcan"
push_eh handler
addparent $P0, $P1
pop_eh
ok(0, "'Only classes can be subclassed' exception not thrown")
goto end
handler:
.exception_is( "Only classes can be subclassed" )
end:
.end
.sub subclassing_a_non_existent_class
push_eh handler
subclass $P1, "Character", "Nemo"
pop_eh
ok(0, "nonexistent class exception not thrown")
goto end
handler:
.exception_is( "Class 'Character' doesn't exist" )
end:
.end
.sub anon_subclass_of_non_existent_class
push_eh handler
subclass $P1, "Character"
pop_eh
ok(0, "nonexistent class exception not thrown")
goto end
handler:
.exception_is( "Class 'Character' doesn't exist" )
end:
.end
.sub addattribute_duplicate
newclass $P1, "Foo53"
addattribute $P1, "i"
addattribute $P1, "j"
push_eh handler
addattribute $P1, "i"
pop_eh
ok(0, "attribute already exists exception not thrown")
goto end
handler:
.exception_is( "Attribute 'i' already exists in 'Foo53'" )
end:
.end
.sub wrong_way_to_create_new_objects
push_eh handler
new $P0, ['Object']
pop_eh
ok(0, "object instantiation exception not thrown")
goto end
handler:
.exception_is( "Object must be created by a class" )
end:
.end
.sub attribute_values__subclassing_access_meths
newclass $P1, "Foo54"
# must add attributes before object instantiation
addattribute $P1, "i"
addattribute $P1, "j"
# define attrib access functions in Foo54 namespace
get_global $P5, "Foo54__set"
addmethod $P1, "Foo54__set", $P5
get_global $P5, "Foo54__get"
addmethod $P1, "Foo54__get", $P5
subclass $P2, $P1, "Bar54"
addattribute $P2, "k"
addattribute $P2, "l"
get_global $P5, "Bar54__set"
addmethod $P2, "Bar54__set", $P5
get_global $P5, "Bar54__get"
addmethod $P2, "Bar54__get", $P5
# instantiate a Bar54 object
new $P13, "Bar54"
# Foo54 and Bar54 have attribute accessor methods
new $P5, ['String'] # set attribute values
set $P5, "i" # attribute slots have reference semantics
set_args "0,0,0", $P13, $P5, "i"
callmethodcc $P13, "Foo54__set"
get_results ""
new $P5, ['String']
set $P5, "j"
set_args "0,0,0", $P13, $P5, "j"
callmethodcc $P13,"Foo54__set"
get_results ""
new $P5, ['String']
set $P5, "k"
set_args "0,0,0", $P13, $P5, "k"
callmethodcc $P13,"Bar54__set"
get_results ""
new $P5, ['String']
set $P5, "l"
set_args "0,0,0", $P13, $P5, "l"
callmethodcc $P13,"Bar54__set"
get_results ""
# now retrieve attributes
set_args "0,0", $P13, "i"
callmethodcc $P13,"Foo54__get"
get_results "0", $P5
is( $P5, "i", 'got attrib i from Bar54->Foo54__get' )
set_args "0,0", $P13, "j"
callmethodcc $P13,"Foo54__get"
get_results "0", $P5
is( $P5, "j", 'got attrib j from Bar54->Foo54__get' )
set_args "0,0", $P13, "k"
callmethodcc $P13,"Bar54__get"
get_results "0", $P5
is( $P5, "k", 'got attrib k from Bar54->Bar54__get' )
set_args "0,0", $P13, "l"
callmethodcc $P13, "Bar54__get"
get_results "0", $P5
is( $P5, "l", 'got attrib l from Bar54->Bar54__get' )
.end
# set(obj: Pvalue, Iattr_idx)
.sub Foo54__set
get_params "0,0,0", $P2, $P5, $S4
ok( 1, "in Foo54__set" )
setattribute $P2, $S4, $P5
set_returns ""
returncc
.end
# Pattr = get(obj: Iattr_idx)
.sub Foo54__get
get_params "0,0", $P2, $S4
ok( 1, "in Foo54__get" )
getattribute $P5, $P2, $S4
set_returns "0", $P5
returncc
.end
.sub Bar54__set
get_params "0,0,0", $P2, $P5, $S4
ok( 1, "in Bar54__set" )
setattribute $P2, $S4, $P5
set_returns ""
returncc
.end
.sub Bar54__get
get_params "0,0", $P2, $S4
ok( 1, "in Bar54__get" )
getattribute $P5, $P2, $S4
set_returns "0", $P5
returncc
.end
.sub attribute_values__inherited_access_meths
newclass $P1, "Foo56"
# must add attributes before object instantiation
addattribute $P1, "i"
addattribute $P1, "j"
# define attrib access functions
get_global $P5, "set"
addmethod $P1, "set", $P5
get_global $P5, "get"
addmethod $P1, "get", $P5
subclass $P2, $P1, "Bar56"
addattribute $P2, "k"
addattribute $P2, "l"
addattribute $P2, "m"
# subclass is preferred for the SI case over
# newclass $P2, "Bar56"
# addattrib ...
# addparent $P2, $P1
# which is suitable for adding multiple parents to one class
# instantiate a Bar56 object
new $P2, "Bar56"
# Foo56 and Bar56 have attribute accessor methods
new $P5, ['String'] # set attribute values
set $P5, "i" # attribute slots have reference semantics
set_args "0,0,0,0", $P2, $P5, "Foo56", "i"
callmethodcc $P2, "set"
new $P5, ['String']
set $P5, "j"
set_args "0,0,0,0", $P2, $P5, "Foo56", "j"
callmethodcc $P2, "set"
new $P5, ['String']
set $P5, "k"
set_args "0,0,0,0", $P2, $P5, "Bar56", "k"
callmethodcc $P2, "set"
new $P5, ['String']
set $P5, "l"
set_args "0,0,0,0", $P2, $P5, "Bar56", "l"
callmethodcc $P2, "set"
new $P5, ['String']
set $P5, "m"
set_args "0,0,0,0", $P2, $P5, "Bar56", "m"
callmethodcc $P2, "set"
# now retrieve attributes
set_args "0,0,0", $P2, "Foo56", "i"
callmethodcc $P2, "get"
get_results "0", $P5
is( $P5, 'i', 'got attrib i from subclass through parent method' )
set_args "0,0,0", $P2, "Foo56", "j"
callmethodcc $P2, "get"
get_results "0", $P5
is( $P5, "j", 'got attrib i from subclass through parent method' )
set_args "0,0,0", $P2, "Bar56", "k"
callmethodcc $P2, "get"
get_results "0", $P5
is( $P5, "k", 'got attrib i from subclass through parent method' )
set_args "0,0,0", $P2, "Bar56", "l"
callmethodcc $P2, "get"
get_results "0", $P5
is( $P5, "l", 'got attrib i from subclass through parent method' )
set_args "0,0,0", $P2, "Bar56", "m"
callmethodcc $P2, "get"
get_results "0", $P5
is( $P5, "m", 'got attrib i from subclass through parent method' )
.end
# Foo56 provides accessor functions which Bar56 inherits
# they take an additional classname argument SClass
# set(obj: Pvalue, SClass, Sattr)
.sub set
get_params "0,0,0,0", $P2, $P5, $S4, $S5
setattribute $P2, $S5, $P5
set_returns ""
returncc
.end
# Pattr = get(obj: SClass, Sattr)
.sub get
get_params "0,0,0", $P2, $S4, $S5
getattribute $P5, $P2, $S5
set_returns "0", $P5
returncc
.end
# Local Variables:
# mode: pir
# fill-column: 100
# End:
# vim: expandtab shiftwidth=4 ft=pir:
| 23.466479 | 84 | 0.600821 |
73d3c1cfd764572a9c87c7277516027449260e31 | 25,880 | pm | Perl | windows/cperl/lib/IO/Compress/RawDeflate.pm | SCOTT-HAMILTON/Monetcours | 66a2970218a31e9987a4e7eb37443c54f22e6825 | [
"MIT"
] | 2 | 2021-02-18T02:10:12.000Z | 2022-02-07T13:19:09.000Z | windows/cperl/lib/IO/Compress/RawDeflate.pm | SCOTT-HAMILTON/Monetcours | 66a2970218a31e9987a4e7eb37443c54f22e6825 | [
"MIT"
] | 1 | 2017-05-27T05:49:30.000Z | 2017-05-27T05:49:30.000Z | windows/cperl/lib/IO/Compress/RawDeflate.pm | SCOTT-HAMILTON/Monetcours | 66a2970218a31e9987a4e7eb37443c54f22e6825 | [
"MIT"
] | 3 | 2017-05-22T18:40:12.000Z | 2017-05-24T18:45:08.000Z | package IO::Compress::RawDeflate ;
# create RFC1951
#
use strict ;
use warnings;
use bytes;
use IO::Compress::Base 2.086 ;
use IO::Compress::Base::Common 2.086 qw(:Status );
use IO::Compress::Adapter::Deflate 2.086 ;
require Exporter ;
our ($VERSION, @ISA, @EXPORT_OK, %DEFLATE_CONSTANTS, %EXPORT_TAGS, $RawDeflateError);
$VERSION = '2.086';
$RawDeflateError = '';
@ISA = qw(IO::Compress::Base Exporter);
@EXPORT_OK = qw( $RawDeflateError rawdeflate ) ;
push @EXPORT_OK, @IO::Compress::Adapter::Deflate::EXPORT_OK ;
%EXPORT_TAGS = %IO::Compress::Adapter::Deflate::DEFLATE_CONSTANTS;
{
my %seen;
foreach (keys %EXPORT_TAGS )
{
push @{$EXPORT_TAGS{constants}},
grep { !$seen{$_}++ }
@{ $EXPORT_TAGS{$_} }
}
$EXPORT_TAGS{all} = $EXPORT_TAGS{constants} ;
}
%DEFLATE_CONSTANTS = %EXPORT_TAGS;
#push @{ $EXPORT_TAGS{all} }, @EXPORT_OK ;
Exporter::export_ok_tags('all');
sub new
{
my $class = shift ;
my $obj = IO::Compress::Base::Common::createSelfTiedObject($class, \$RawDeflateError);
return $obj->_create(undef, @_);
}
sub rawdeflate
{
my $obj = IO::Compress::Base::Common::createSelfTiedObject(undef, \$RawDeflateError);
return $obj->_def(@_);
}
sub ckParams
{
my $self = shift ;
my $got = shift;
return 1 ;
}
sub mkComp
{
my $self = shift ;
my $got = shift ;
my ($obj, $errstr, $errno) = IO::Compress::Adapter::Deflate::mkCompObject(
$got->getValue('crc32'),
$got->getValue('adler32'),
$got->getValue('level'),
$got->getValue('strategy')
);
return $self->saveErrorString(undef, $errstr, $errno)
if ! defined $obj;
return $obj;
}
sub mkHeader
{
my $self = shift ;
return '';
}
sub mkTrailer
{
my $self = shift ;
return '';
}
sub mkFinalTrailer
{
return '';
}
#sub newHeader
#{
# my $self = shift ;
# return '';
#}
sub getExtraParams
{
my $self = shift ;
return getZlibParams();
}
use IO::Compress::Base::Common 2.086 qw(:Parse);
use Compress::Raw::Zlib 2.086 qw(Z_DEFLATED Z_DEFAULT_COMPRESSION Z_DEFAULT_STRATEGY);
our %PARAMS = (
#'method' => [IO::Compress::Base::Common::Parse_unsigned, Z_DEFLATED],
'level' => [IO::Compress::Base::Common::Parse_signed, Z_DEFAULT_COMPRESSION],
'strategy' => [IO::Compress::Base::Common::Parse_signed, Z_DEFAULT_STRATEGY],
'crc32' => [IO::Compress::Base::Common::Parse_boolean, 0],
'adler32' => [IO::Compress::Base::Common::Parse_boolean, 0],
'merge' => [IO::Compress::Base::Common::Parse_boolean, 0],
);
sub getZlibParams
{
return %PARAMS;
}
sub getInverseClass
{
return ('IO::Uncompress::RawInflate',
\$IO::Uncompress::RawInflate::RawInflateError);
}
sub getFileInfo
{
my $self = shift ;
my $params = shift;
my $file = shift ;
}
use Fcntl qw(SEEK_SET);
sub createMerge
{
my $self = shift ;
my $outValue = shift ;
my $outType = shift ;
my ($invClass, $error_ref) = $self->getInverseClass();
eval "require $invClass"
or die "aaaahhhh" ;
my $inf = $invClass->new( $outValue,
Transparent => 0,
#Strict => 1,
AutoClose => 0,
Scan => 1)
or return $self->saveErrorString(undef, "Cannot create InflateScan object: $$error_ref" ) ;
my $end_offset = 0;
$inf->scan()
or return $self->saveErrorString(undef, "Error Scanning: $$error_ref", $inf->errorNo) ;
$inf->zap($end_offset)
or return $self->saveErrorString(undef, "Error Zapping: $$error_ref", $inf->errorNo) ;
my $def = *$self->{Compress} = $inf->createDeflate();
*$self->{Header} = *$inf->{Info}{Header};
*$self->{UnCompSize} = *$inf->{UnCompSize}->clone();
*$self->{CompSize} = *$inf->{CompSize}->clone();
# TODO -- fix this
#*$self->{CompSize} = new U64(0, *$self->{UnCompSize_32bit});
if ( $outType eq 'buffer')
{ substr( ${ *$self->{Buffer} }, $end_offset) = '' }
elsif ($outType eq 'handle' || $outType eq 'filename') {
*$self->{FH} = *$inf->{FH} ;
delete *$inf->{FH};
*$self->{FH}->flush() ;
*$self->{Handle} = 1 if $outType eq 'handle';
#seek(*$self->{FH}, $end_offset, SEEK_SET)
*$self->{FH}->seek($end_offset, SEEK_SET)
or return $self->saveErrorString(undef, $!, $!) ;
}
return $def ;
}
#### zlib specific methods
sub deflateParams
{
my $self = shift ;
my $level = shift ;
my $strategy = shift ;
my $status = *$self->{Compress}->deflateParams(Level => $level, Strategy => $strategy) ;
return $self->saveErrorString(0, *$self->{Compress}{Error}, *$self->{Compress}{ErrorNo})
if $status == STATUS_ERROR;
return 1;
}
1;
__END__
=head1 NAME
IO::Compress::RawDeflate - Write RFC 1951 files/buffers
=head1 SYNOPSIS
use IO::Compress::RawDeflate qw(rawdeflate $RawDeflateError) ;
my $status = rawdeflate $input => $output [,OPTS]
or die "rawdeflate failed: $RawDeflateError\n";
my $z = new IO::Compress::RawDeflate $output [,OPTS]
or die "rawdeflate failed: $RawDeflateError\n";
$z->print($string);
$z->printf($format, $string);
$z->write($string);
$z->syswrite($string [, $length, $offset]);
$z->flush();
$z->tell();
$z->eof();
$z->seek($position, $whence);
$z->binmode();
$z->fileno();
$z->opened();
$z->autoflush();
$z->input_line_number();
$z->newStream( [OPTS] );
$z->deflateParams();
$z->close() ;
$RawDeflateError ;
# IO::File mode
print $z $string;
printf $z $format, $string;
tell $z
eof $z
seek $z, $position, $whence
binmode $z
fileno $z
close $z ;
=head1 DESCRIPTION
This module provides a Perl interface that allows writing compressed
data to files or buffer as defined in RFC 1951.
Note that RFC 1951 data is not a good choice of compression format
to use in isolation, especially if you want to auto-detect it.
For reading RFC 1951 files/buffers, see the companion module
L<IO::Uncompress::RawInflate|IO::Uncompress::RawInflate>.
=head1 Functional Interface
A top-level function, C<rawdeflate>, is provided to carry out
"one-shot" compression between buffers and/or files. For finer
control over the compression process, see the L</"OO Interface">
section.
use IO::Compress::RawDeflate qw(rawdeflate $RawDeflateError) ;
rawdeflate $input_filename_or_reference => $output_filename_or_reference [,OPTS]
or die "rawdeflate failed: $RawDeflateError\n";
The functional interface needs Perl5.005 or better.
=head2 rawdeflate $input_filename_or_reference => $output_filename_or_reference [, OPTS]
C<rawdeflate> expects at least two parameters,
C<$input_filename_or_reference> and C<$output_filename_or_reference>.
=head3 The C<$input_filename_or_reference> parameter
The parameter, C<$input_filename_or_reference>, is used to define the
source of the uncompressed data.
It can take one of the following forms:
=over 5
=item A filename
If the <$input_filename_or_reference> parameter is a simple scalar, it is
assumed to be a filename. This file will be opened for reading and the
input data will be read from it.
=item A filehandle
If the C<$input_filename_or_reference> parameter is a filehandle, the input
data will be read from it. The string '-' can be used as an alias for
standard input.
=item A scalar reference
If C<$input_filename_or_reference> is a scalar reference, the input data
will be read from C<$$input_filename_or_reference>.
=item An array reference
If C<$input_filename_or_reference> is an array reference, each element in
the array must be a filename.
The input data will be read from each file in turn.
The complete array will be walked to ensure that it only
contains valid filenames before any data is compressed.
=item An Input FileGlob string
If C<$input_filename_or_reference> is a string that is delimited by the
characters "<" and ">" C<rawdeflate> will assume that it is an
I<input fileglob string>. The input is the list of files that match the
fileglob.
See L<File::GlobMapper|File::GlobMapper> for more details.
=back
If the C<$input_filename_or_reference> parameter is any other type,
C<undef> will be returned.
=head3 The C<$output_filename_or_reference> parameter
The parameter C<$output_filename_or_reference> is used to control the
destination of the compressed data. This parameter can take one of
these forms.
=over 5
=item A filename
If the C<$output_filename_or_reference> parameter is a simple scalar, it is
assumed to be a filename. This file will be opened for writing and the
compressed data will be written to it.
=item A filehandle
If the C<$output_filename_or_reference> parameter is a filehandle, the
compressed data will be written to it. The string '-' can be used as
an alias for standard output.
=item A scalar reference
If C<$output_filename_or_reference> is a scalar reference, the
compressed data will be stored in C<$$output_filename_or_reference>.
=item An Array Reference
If C<$output_filename_or_reference> is an array reference,
the compressed data will be pushed onto the array.
=item An Output FileGlob
If C<$output_filename_or_reference> is a string that is delimited by the
characters "<" and ">" C<rawdeflate> will assume that it is an
I<output fileglob string>. The output is the list of files that match the
fileglob.
When C<$output_filename_or_reference> is an fileglob string,
C<$input_filename_or_reference> must also be a fileglob string. Anything
else is an error.
See L<File::GlobMapper|File::GlobMapper> for more details.
=back
If the C<$output_filename_or_reference> parameter is any other type,
C<undef> will be returned.
=head2 Notes
When C<$input_filename_or_reference> maps to multiple files/buffers and
C<$output_filename_or_reference> is a single
file/buffer the input files/buffers will be stored
in C<$output_filename_or_reference> as a concatenated series of compressed data streams.
=head2 Optional Parameters
Unless specified below, the optional parameters for C<rawdeflate>,
C<OPTS>, are the same as those used with the OO interface defined in the
L</"Constructor Options"> section below.
=over 5
=item C<< AutoClose => 0|1 >>
This option applies to any input or output data streams to
C<rawdeflate> that are filehandles.
If C<AutoClose> is specified, and the value is true, it will result in all
input and/or output filehandles being closed once C<rawdeflate> has
completed.
This parameter defaults to 0.
=item C<< BinModeIn => 0|1 >>
This option is now a no-op. All files will be read in binmode.
=item C<< Append => 0|1 >>
The behaviour of this option is dependent on the type of output data
stream.
=over 5
=item * A Buffer
If C<Append> is enabled, all compressed data will be append to the end of
the output buffer. Otherwise the output buffer will be cleared before any
compressed data is written to it.
=item * A Filename
If C<Append> is enabled, the file will be opened in append mode. Otherwise
the contents of the file, if any, will be truncated before any compressed
data is written to it.
=item * A Filehandle
If C<Append> is enabled, the filehandle will be positioned to the end of
the file via a call to C<seek> before any compressed data is
written to it. Otherwise the file pointer will not be moved.
=back
When C<Append> is specified, and set to true, it will I<append> all compressed
data to the output data stream.
So when the output is a filehandle it will carry out a seek to the eof
before writing any compressed data. If the output is a filename, it will be opened for
appending. If the output is a buffer, all compressed data will be
appended to the existing buffer.
Conversely when C<Append> is not specified, or it is present and is set to
false, it will operate as follows.
When the output is a filename, it will truncate the contents of the file
before writing any compressed data. If the output is a filehandle
its position will not be changed. If the output is a buffer, it will be
wiped before any compressed data is output.
Defaults to 0.
=back
=head2 Examples
To read the contents of the file C<file1.txt> and write the compressed
data to the file C<file1.txt.1951>.
use strict ;
use warnings ;
use IO::Compress::RawDeflate qw(rawdeflate $RawDeflateError) ;
my $input = "file1.txt";
rawdeflate $input => "$input.1951"
or die "rawdeflate failed: $RawDeflateError\n";
To read from an existing Perl filehandle, C<$input>, and write the
compressed data to a buffer, C<$buffer>.
use strict ;
use warnings ;
use IO::Compress::RawDeflate qw(rawdeflate $RawDeflateError) ;
use IO::File ;
my $input = new IO::File "<file1.txt"
or die "Cannot open 'file1.txt': $!\n" ;
my $buffer ;
rawdeflate $input => \$buffer
or die "rawdeflate failed: $RawDeflateError\n";
To compress all files in the directory "/my/home" that match "*.txt"
and store the compressed data in the same directory
use strict ;
use warnings ;
use IO::Compress::RawDeflate qw(rawdeflate $RawDeflateError) ;
rawdeflate '</my/home/*.txt>' => '<*.1951>'
or die "rawdeflate failed: $RawDeflateError\n";
and if you want to compress each file one at a time, this will do the trick
use strict ;
use warnings ;
use IO::Compress::RawDeflate qw(rawdeflate $RawDeflateError) ;
for my $input ( glob "/my/home/*.txt" )
{
my $output = "$input.1951" ;
rawdeflate $input => $output
or die "Error compressing '$input': $RawDeflateError\n";
}
=head1 OO Interface
=head2 Constructor
The format of the constructor for C<IO::Compress::RawDeflate> is shown below
my $z = new IO::Compress::RawDeflate $output [,OPTS]
or die "IO::Compress::RawDeflate failed: $RawDeflateError\n";
It returns an C<IO::Compress::RawDeflate> object on success and undef on failure.
The variable C<$RawDeflateError> will contain an error message on failure.
If you are running Perl 5.005 or better the object, C<$z>, returned from
IO::Compress::RawDeflate can be used exactly like an L<IO::File|IO::File> filehandle.
This means that all normal output file operations can be carried out
with C<$z>.
For example, to write to a compressed file/buffer you can use either of
these forms
$z->print("hello world\n");
print $z "hello world\n";
The mandatory parameter C<$output> is used to control the destination
of the compressed data. This parameter can take one of these forms.
=over 5
=item A filename
If the C<$output> parameter is a simple scalar, it is assumed to be a
filename. This file will be opened for writing and the compressed data
will be written to it.
=item A filehandle
If the C<$output> parameter is a filehandle, the compressed data will be
written to it.
The string '-' can be used as an alias for standard output.
=item A scalar reference
If C<$output> is a scalar reference, the compressed data will be stored
in C<$$output>.
=back
If the C<$output> parameter is any other type, C<IO::Compress::RawDeflate>::new will
return undef.
=head2 Constructor Options
C<OPTS> is any combination of the following options:
=over 5
=item C<< AutoClose => 0|1 >>
This option is only valid when the C<$output> parameter is a filehandle. If
specified, and the value is true, it will result in the C<$output> being
closed once either the C<close> method is called or the C<IO::Compress::RawDeflate>
object is destroyed.
This parameter defaults to 0.
=item C<< Append => 0|1 >>
Opens C<$output> in append mode.
The behaviour of this option is dependent on the type of C<$output>.
=over 5
=item * A Buffer
If C<$output> is a buffer and C<Append> is enabled, all compressed data
will be append to the end of C<$output>. Otherwise C<$output> will be
cleared before any data is written to it.
=item * A Filename
If C<$output> is a filename and C<Append> is enabled, the file will be
opened in append mode. Otherwise the contents of the file, if any, will be
truncated before any compressed data is written to it.
=item * A Filehandle
If C<$output> is a filehandle, the file pointer will be positioned to the
end of the file via a call to C<seek> before any compressed data is written
to it. Otherwise the file pointer will not be moved.
=back
This parameter defaults to 0.
=item C<< Merge => 0|1 >>
This option is used to compress input data and append it to an existing
compressed data stream in C<$output>. The end result is a single compressed
data stream stored in C<$output>.
It is a fatal error to attempt to use this option when C<$output> is not an
RFC 1951 data stream.
There are a number of other limitations with the C<Merge> option:
=over 5
=item 1
This module needs to have been built with zlib 1.2.1 or better to work. A
fatal error will be thrown if C<Merge> is used with an older version of
zlib.
=item 2
If C<$output> is a file or a filehandle, it must be seekable.
=back
This parameter defaults to 0.
=item -Level
Defines the compression level used by zlib. The value should either be
a number between 0 and 9 (0 means no compression and 9 is maximum
compression), or one of the symbolic constants defined below.
Z_NO_COMPRESSION
Z_BEST_SPEED
Z_BEST_COMPRESSION
Z_DEFAULT_COMPRESSION
The default is Z_DEFAULT_COMPRESSION.
Note, these constants are not imported by C<IO::Compress::RawDeflate> by default.
use IO::Compress::RawDeflate qw(:strategy);
use IO::Compress::RawDeflate qw(:constants);
use IO::Compress::RawDeflate qw(:all);
=item -Strategy
Defines the strategy used to tune the compression. Use one of the symbolic
constants defined below.
Z_FILTERED
Z_HUFFMAN_ONLY
Z_RLE
Z_FIXED
Z_DEFAULT_STRATEGY
The default is Z_DEFAULT_STRATEGY.
=item C<< Strict => 0|1 >>
This is a placeholder option.
=back
=head2 Examples
TODO
=head1 Methods
=head2 print
Usage is
$z->print($data)
print $z $data
Compresses and outputs the contents of the C<$data> parameter. This
has the same behaviour as the C<print> built-in.
Returns true if successful.
=head2 printf
Usage is
$z->printf($format, $data)
printf $z $format, $data
Compresses and outputs the contents of the C<$data> parameter.
Returns true if successful.
=head2 syswrite
Usage is
$z->syswrite $data
$z->syswrite $data, $length
$z->syswrite $data, $length, $offset
Compresses and outputs the contents of the C<$data> parameter.
Returns the number of uncompressed bytes written, or C<undef> if
unsuccessful.
=head2 write
Usage is
$z->write $data
$z->write $data, $length
$z->write $data, $length, $offset
Compresses and outputs the contents of the C<$data> parameter.
Returns the number of uncompressed bytes written, or C<undef> if
unsuccessful.
=head2 flush
Usage is
$z->flush;
$z->flush($flush_type);
Flushes any pending compressed data to the output file/buffer.
This method takes an optional parameter, C<$flush_type>, that controls
how the flushing will be carried out. By default the C<$flush_type>
used is C<Z_FINISH>. Other valid values for C<$flush_type> are
C<Z_NO_FLUSH>, C<Z_SYNC_FLUSH>, C<Z_FULL_FLUSH> and C<Z_BLOCK>. It is
strongly recommended that you only set the C<flush_type> parameter if
you fully understand the implications of what it does - overuse of C<flush>
can seriously degrade the level of compression achieved. See the C<zlib>
documentation for details.
Returns true on success.
=head2 tell
Usage is
$z->tell()
tell $z
Returns the uncompressed file offset.
=head2 eof
Usage is
$z->eof();
eof($z);
Returns true if the C<close> method has been called.
=head2 seek
$z->seek($position, $whence);
seek($z, $position, $whence);
Provides a sub-set of the C<seek> functionality, with the restriction
that it is only legal to seek forward in the output file/buffer.
It is a fatal error to attempt to seek backward.
Empty parts of the file/buffer will have NULL (0x00) bytes written to them.
The C<$whence> parameter takes one the usual values, namely SEEK_SET,
SEEK_CUR or SEEK_END.
Returns 1 on success, 0 on failure.
=head2 binmode
Usage is
$z->binmode
binmode $z ;
This is a noop provided for completeness.
=head2 opened
$z->opened()
Returns true if the object currently refers to a opened file/buffer.
=head2 autoflush
my $prev = $z->autoflush()
my $prev = $z->autoflush(EXPR)
If the C<$z> object is associated with a file or a filehandle, this method
returns the current autoflush setting for the underlying filehandle. If
C<EXPR> is present, and is non-zero, it will enable flushing after every
write/print operation.
If C<$z> is associated with a buffer, this method has no effect and always
returns C<undef>.
B<Note> that the special variable C<$|> B<cannot> be used to set or
retrieve the autoflush setting.
=head2 input_line_number
$z->input_line_number()
$z->input_line_number(EXPR)
This method always returns C<undef> when compressing.
=head2 fileno
$z->fileno()
fileno($z)
If the C<$z> object is associated with a file or a filehandle, C<fileno>
will return the underlying file descriptor. Once the C<close> method is
called C<fileno> will return C<undef>.
If the C<$z> object is associated with a buffer, this method will return
C<undef>.
=head2 close
$z->close() ;
close $z ;
Flushes any pending compressed data and then closes the output file/buffer.
For most versions of Perl this method will be automatically invoked if
the IO::Compress::RawDeflate object is destroyed (either explicitly or by the
variable with the reference to the object going out of scope). The
exceptions are Perl versions 5.005 through 5.00504 and 5.8.0. In
these cases, the C<close> method will be called automatically, but
not until global destruction of all live objects when the program is
terminating.
Therefore, if you want your scripts to be able to run on all versions
of Perl, you should call C<close> explicitly and not rely on automatic
closing.
Returns true on success, otherwise 0.
If the C<AutoClose> option has been enabled when the IO::Compress::RawDeflate
object was created, and the object is associated with a file, the
underlying file will also be closed.
=head2 newStream([OPTS])
Usage is
$z->newStream( [OPTS] )
Closes the current compressed data stream and starts a new one.
OPTS consists of any of the options that are available when creating
the C<$z> object.
See the L</"Constructor Options"> section for more details.
=head2 deflateParams
Usage is
$z->deflateParams
TODO
=head1 Importing
A number of symbolic constants are required by some methods in
C<IO::Compress::RawDeflate>. None are imported by default.
=over 5
=item :all
Imports C<rawdeflate>, C<$RawDeflateError> and all symbolic
constants that can be used by C<IO::Compress::RawDeflate>. Same as doing this
use IO::Compress::RawDeflate qw(rawdeflate $RawDeflateError :constants) ;
=item :constants
Import all symbolic constants. Same as doing this
use IO::Compress::RawDeflate qw(:flush :level :strategy) ;
=item :flush
These symbolic constants are used by the C<flush> method.
Z_NO_FLUSH
Z_PARTIAL_FLUSH
Z_SYNC_FLUSH
Z_FULL_FLUSH
Z_FINISH
Z_BLOCK
=item :level
These symbolic constants are used by the C<Level> option in the constructor.
Z_NO_COMPRESSION
Z_BEST_SPEED
Z_BEST_COMPRESSION
Z_DEFAULT_COMPRESSION
=item :strategy
These symbolic constants are used by the C<Strategy> option in the constructor.
Z_FILTERED
Z_HUFFMAN_ONLY
Z_RLE
Z_FIXED
Z_DEFAULT_STRATEGY
=back
=head1 EXAMPLES
=head2 Apache::GZip Revisited
See L<IO::Compress::FAQ|IO::Compress::FAQ/"Apache::GZip Revisited">
=head2 Working with Net::FTP
See L<IO::Compress::FAQ|IO::Compress::FAQ/"Compressed files and Net::FTP">
=head1 SEE ALSO
L<Compress::Zlib>, L<IO::Compress::Gzip>, L<IO::Uncompress::Gunzip>, L<IO::Compress::Deflate>, L<IO::Uncompress::Inflate>, L<IO::Uncompress::RawInflate>, L<IO::Compress::Bzip2>, L<IO::Uncompress::Bunzip2>, L<IO::Compress::Lzma>, L<IO::Uncompress::UnLzma>, L<IO::Compress::Xz>, L<IO::Uncompress::UnXz>, L<IO::Compress::Lzip>, L<IO::Uncompress::UnLzip>, L<IO::Compress::Lzop>, L<IO::Uncompress::UnLzop>, L<IO::Compress::Lzf>, L<IO::Uncompress::UnLzf>, L<IO::Compress::Zstd>, L<IO::Uncompress::UnZstd>, L<IO::Uncompress::AnyInflate>, L<IO::Uncompress::AnyUncompress>
L<IO::Compress::FAQ|IO::Compress::FAQ>
L<File::GlobMapper|File::GlobMapper>, L<Archive::Zip|Archive::Zip>,
L<Archive::Tar|Archive::Tar>,
L<IO::Zlib|IO::Zlib>
For RFC 1950, 1951 and 1952 see
L<http://www.faqs.org/rfcs/rfc1950.html>,
L<http://www.faqs.org/rfcs/rfc1951.html> and
L<http://www.faqs.org/rfcs/rfc1952.html>
The I<zlib> compression library was written by Jean-loup Gailly
C<[email protected]> and Mark Adler C<[email protected]>.
The primary site for the I<zlib> compression library is
L<http://www.zlib.org>.
The primary site for gzip is L<http://www.gzip.org>.
=head1 AUTHOR
This module was written by Paul Marquess, C<[email protected]>.
=head1 MODIFICATION HISTORY
See the Changes file.
=head1 COPYRIGHT AND LICENSE
Copyright (c) 2005-2019 Paul Marquess. All rights reserved.
This program is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.
| 26.167846 | 563 | 0.698725 |
edd89a78eaa58d789e2fc88db1c06308e3899876 | 1,885 | t | Perl | t/01-basic.t | lestrrat-p6/Crust-Middleware-Session | dda68790dd32589f9372e885e6d1aea2f085b35f | [
"Artistic-2.0"
] | 2 | 2015-11-01T02:32:07.000Z | 2016-02-11T10:21:28.000Z | t/01-basic.t | lestrrat-p6/Crust-Middleware-Session | dda68790dd32589f9372e885e6d1aea2f085b35f | [
"Artistic-2.0"
] | 1 | 2016-11-04T13:17:40.000Z | 2016-11-06T21:34:53.000Z | t/01-basic.t | lestrrat-p6/Crust-Middleware-Session | dda68790dd32589f9372e885e6d1aea2f085b35f | [
"Artistic-2.0"
] | 3 | 2016-11-04T12:02:39.000Z | 2017-04-26T01:58:31.000Z | use v6;
use Test;
use Crust::Middleware::Session;
use Cookie::Baker;
my &app = sub (%env) {
return [ 200, [ "Content-Type" => "text/plain" ], [ "Hello, World!" ] ];
};
subtest {
dies-ok {
Crust::Middleware::Session::SimpleSession.new()
}, "id should be required, so arg-less new() should die";
lives-ok {
my $s = Crust::Middleware::Session::SimpleSession.new(:id("foo"));
$s.id = "bar";
}, "can change id";
}, "SimpleSession";
subtest {
dies-ok { Crust::Middleware::Session.new() }, "missing :store dies";
lives-ok { Crust::Middleware::Session.new(
&app,
:store(Crust::Middleware::Session::Store::Memory.new())
) }, ":store exists, lives";
}, "instantiation";
subtest {
my $mw = Crust::Middleware::Session.new(
&app,
:store(Crust::Middleware::Session::Store::Memory.new())
);
is $mw.cookie-name, "crust-session", "default value for cookie-name";
is $mw.path, "/";
is $mw.keep-empty, True;
is $mw.secure, False;
is $mw.httponly, False;
ok $mw.sid-generator;
ok $mw.sid-validator;
}, "default values";
subtest {
my $cookie-name = "crust-session-test";
my $domain = "crust.p6.org";
my $path = "/foo/bar/";
my $mw = Crust::Middleware::Session.new(
&app,
:cookie-name($cookie-name),
:domain($domain),
:path($path),
:store(Crust::Middleware::Session::Store::Memory.new()),
);
my %env = (
HTTP_COOKIE => "",
);
my @res = $mw.(%env);
my %h = @res[1];
for %h.kv -> $k, $v {
if $k !~~ "Set-Cookie" {
next;
}
my %data = crush-cookie($v);
like %data{$cookie-name}, rx/^ <[0..9,a..f]>**40 $/;
is %data<domain>, $domain;
is %data<path>, $path;
}
}, "Call middleware";
done-testing;
| 25.133333 | 76 | 0.531565 |
edc88fd6d9d351ac7c8a459253bc5ea2389a7db4 | 1,988 | pl | Perl | maint/build.pl | LTD-Beget/cpanminus | 1f48ea8b93915a2771c29a48a7de720e795f3ed7 | [
"Artistic-1.0"
] | null | null | null | maint/build.pl | LTD-Beget/cpanminus | 1f48ea8b93915a2771c29a48a7de720e795f3ed7 | [
"Artistic-1.0"
] | 1 | 2015-10-22T17:39:56.000Z | 2015-10-22T18:30:47.000Z | maint/build.pl | LTD-Beget/cpanminus | 1f48ea8b93915a2771c29a48a7de720e795f3ed7 | [
"Artistic-1.0"
] | 1 | 2015-10-13T15:10:02.000Z | 2015-10-13T15:10:02.000Z | #!/usr/bin/env perl
use strict;
use File::pushd;
use File::Find;
=for developers
NAME DESCRIPTION repo CPAN | wget source CPAN
--------------------------------------------------------------------------------------------+--------------------
script/cpanm.PL frontend source YES NO |
lib/App/cpanminus/script.pm "the gut". YES NO | x
cpanm standalone, packed. #!/usr/bin/env (for cp) YES NO | x
bin/cpanm standalone, packed. #!perl (for EUMM) NO YES | x x
=cut
sub generate_file {
my($base, $target, $fatpack, $shebang_replace) = @_;
open my $in, "<", $base or die $!;
open my $out, ">", "$target.tmp" or die $!;
print STDERR "Generating $target from $base\n";
while (<$in>) {
next if /Auto-removed/;
s|^#!/usr/bin/env perl|$shebang_replace| if $shebang_replace;
s/DEVELOPERS:.*/DO NOT EDIT -- this is an auto generated file/;
s/.*__FATPACK__/$fatpack/;
print $out $_;
}
close $out;
unlink $target;
rename "$target.tmp", $target;
}
my $fatpack = `fatpack file`;
mkdir ".build", 0777;
system qw(cp -r fatlib lib .build/);
my $fatpack_compact = do {
my $dir = pushd '.build';
my @files;
my $want = sub {
push @files, $_ if /\.pm$/;
};
find({ wanted => $want, no_chdir => 1 }, "fatlib", "lib");
system 'perlstrip', '--cache', '-v', @files;
`fatpack file`;
};
generate_file('script/cpanm.PL', "cpanm", $fatpack_compact);
generate_file('script/cpanm.PL', "fatpacked/App/cpanminus/fatscript.pm", $fatpack, 'package App::cpanminus::fatscript;');
chmod 0755, "cpanm";
END {
unlink $_ for "cpanm.tmp", "fatpacked/App/cpanminus/fatscript.pm.tmp";
system "rm", "-r", ".build";
}
| 30.121212 | 121 | 0.48994 |
edcb1ea5d83844e6d79a0c6cb70148959676c127 | 14,442 | pl | Perl | probes/hadoop3/similarity_crazy.pl | sauloal/projects | 79068b20251fd29cadbc80a5de550fc77332788d | [
"MIT"
] | null | null | null | probes/hadoop3/similarity_crazy.pl | sauloal/projects | 79068b20251fd29cadbc80a5de550fc77332788d | [
"MIT"
] | null | null | null | probes/hadoop3/similarity_crazy.pl | sauloal/projects | 79068b20251fd29cadbc80a5de550fc77332788d | [
"MIT"
] | 1 | 2018-10-26T05:13:42.000Z | 2018-10-26T05:13:42.000Z | #!/usr/bin/perl -w
use strict;
my $logToFile = 0;
my $verbose = 4;
my $generateArray = 1;
my $saveArraytoFile = 1;
my $loadArrayFromFile = 0;
my $printArray = 1;
my $printArrayToFile = 0;
my $memory = 1;
my $tableSize = 3; #sides of the table
my $numberRegisters = 4; #number of columns per cell
my $fieldSize = 2; # number of bytes per cell
my $cellSize = $fieldSize * $numberRegisters; # in bytes
#my $blockSize = 65536;
my $bitSize = 12;
my $systemBinarity = 32;
print "TABLE SIZE : $tableSize x $tableSize (".($tableSize * $tableSize)." cells)
REGISTERS PER CELL: $numberRegisters (".($tableSize* $numberRegisters)." fields per line / ".(($tableSize * $tableSize) * $numberRegisters)." fields total)
BYTES PER REGISTER: $fieldSize (".join(" ", &convertBytes((($tableSize * $tableSize) * $numberRegisters * $fieldSize))).")\n";
my $dSize = "%06d";
my $format1 = "BIN %".$bitSize."s (%012d)\n";
#my $format2 = "BIN1 %".$bitSize."s (%012d) BIN2 %".$bitSize."s (%012d)\n";
#my $format3 = "BIN1 %".$bitSize."s (%012d) AND BIN2 %".$bitSize."s (%012d) = COMP %".$bitSize."s (%012d)\n";
my $logFile = $0 . ".log";
my $tableFile = $0 . ".tab";
my $dataFile = $0 . ".dat";
my $count = 0;
my $maxValue = -1;
if ( ! ( $generateArray || $loadArrayFromFile ))
{
print "ARRAY MUST COME FROM SOMEWHERE. PLEASE CHECK SETTINGS\n";
exit 1;
}
elsif ( ! ( $saveArraytoFile || $printArray ) && ( $memory ))
{
print "ARRAY MUST GO SOMEWHERE. PLEASE CHECK SETTINGS\n";
exit 2;
}
if ( $saveArraytoFile && ( ! $generateArray ))
{
$saveArraytoFile = 0;
}
if ( $printArrayToFile && ( ! $printArray ))
{
$printArrayToFile = 0;
}
my $std;
if ($logToFile)
{
open LOG, ">", $logFile or die "COULD NOT OPEN LOG FILE $logFile: $!";
$std = *LOG;
} else {
$std = *STDOUT;
}
my $array;
if ( $generateArray )
{
$array = &generateArray();
&saveArray($array) if ( $saveArraytoFile && $memory);
} else {
$array = &loadArray() if ( $loadArrayFromFile && $memory );
}
&printArray($array) if ( $printArray );
close LOG if $logToFile;
sub generateArray
{
print "#"x20 . "\n";
print "GENERATING ARRAY\n";
print "#"x20 . "\n";
my $startTime = time;
my $symArray = '';
if ( ! $memory )
{
unlink($dataFile);
open MEM, ">$dataFile" or die "COULD NOT OPEN MEMORY FILE $dataFile: $!";
binmode(MEM);
}
for my $lineNum (0 .. $tableSize - 1)
{
my $seqBin = &dec2bin($lineNum);
#my $revSeqBin = &rcBin($seqBin);
#my $revSeqNum = &bin2dec($revSeqBin);
my $bin1;
my $bin1Dec;
#if ($seqNum <= $revSeqNum)
#{
$bin1 = $seqBin;
$bin1Dec = $lineNum;
#} else {
#$bin1 = $revSeqBin;
#$bin1Dec = $revSeqNum;
#}
if ( $verbose )
{
#printf $format1, $seqBin, $lineNum;
my $size = &getSize(\$symArray);
#print &eta($startTime, time, 0, $lineNum, $tableSize - 1, \$symArray) . "\n";
}
#printf $format1, $seqBin, $seqNum, $revSeqBin, $revSeqNum, $bin1, $bin1Dec;
my $lineStartAbs = $lineNum * $tableSize * $numberRegisters;
my $lineEndAbs = $lineStartAbs + ( $tableSize * $numberRegisters ) - 1;
my $lVec = '';
print "LINE $lineNum :: LINE: start $lineStartAbs end $lineEndAbs\n";
my $cellStartRel = 0;
my $cellEndRel = $tableSize * $numberRegisters - 1;
my $cellStartAbs = $lineStartAbs;
my $cellEndAbs = $lineEndAbs;
print "\tCELL :: REL: start $cellStartRel end $cellEndRel | ABS: start $cellStartAbs end $cellEndAbs\n";
foreach my $colNum (0 .. $tableSize - 1)
{
my $colBin = &dec2bin($colNum);
#my $revColBin = &rcBin($colBin);
#my $revColNum = &bin2dec($revColBin);
my $bin2;
my $bin2Dec;
$bin2 = $colBin;
$bin2Dec = $colNum;
#printf "\t".$format1, $colBin, $colNum, $revColBin, $revColNum, $bin2, $bin2Dec;
#printf $format2, $bin1, $bin1Dec, $bin2, $bin2Dec;
#my $compORNum = (0+$bin1Dec | 0+$bin2Dec);
#my $compORBin = &dec2bin($compORNum);
#my $compXORNum = (0+$bin1Dec ^ 0+$bin2Dec);
#my $compXORBin = &dec2bin($compXORNum);
my $compANDDec = (0+$bin1Dec & 0+$bin2Dec);
my $compANDBin = &dec2bin($compANDDec);
#printf "\t\t$format3", $bin1, $bin1Dec, $bin2, $bin2Dec, $compANDBin, $compANDDec;
##print map "$_\n", ("BIN1 ".$bin1, "BIN2 ".$bin2, "AND ".$compANDBin, "OR ".$compORBin, "XOR ".$compXORBin);
#print "\n\n";
#$symArray[$bin1Dec][$bin2Dec] = $compANDDec;
# col1 col2 col3
#row1 1x1 2x1 2x3
#row2 2x1 2x2 2x3
#row3 3x1 3x2 3x3
# col1 col2 col3
#row1 1 2 3
#row2 4 5 6
#row3 7 8 9
#row |-----------|-----------|
#cell |-----|-----|-----|-----|
#register |-|-|-|-|-|-|-|-|-|-|-|-|
my $cellNumRelStart = 0;
my $cellNumRelEnd = $tableSize - 1;
my $cellNumAbsStart = ( $lineNum * $tableSize * $numberRegisters ) + ( $colNum * $numberRegisters );
my $cellNumAbsEnd = $cellNumAbsStart + $numberRegisters - 1;
print "\t\tCELL $colNum :: REL: start $cellNumRelStart end $cellNumRelEnd | ABS: start $cellNumAbsStart end $cellNumAbsEnd\n";
next;
#printf $std " $dSize x $dSize = $dSize ".
#"[LINE $dSize COL $dSize LINE START $dSize LINE END $dSize ".
#"CELL ABS NUM $dSize CELL ABS POS $dSize CEL REL POS $dSize ".
#"REGISTER ABS POS $dSize REGISTER REL POS $dSize] {$count}\n",
#$lineNum, $colNum, $lineStart, $lineEnd, $celNumAbs, $el,
#$compANDDec, $lineNum, $lineStart, $lineEnd, $relColPos, $absColPos if ( ($verbose > 1) || $logToFile);
#printf " BIN1DEC $dSize [$dSize] BIN2DEC $dSize [$dSize] COMPANDDEC $dSize [$dSize] COUNT $dSize [$dSize]\n", $bin1Dec, $relColPos+0, $bin2Dec, $relColPos+1, $compANDDec, $relColPos+2, $count, $relColPos+3 if ( ($verbose > 2) || $logToFile);
$maxValue = $bin1Dec > $maxValue ? $bin1Dec : $maxValue;
$maxValue = $bin2Dec > $maxValue ? $bin2Dec : $maxValue;
$maxValue = $compANDDec > $maxValue ? $compANDDec : $maxValue;
$maxValue = $count > $maxValue ? $count : $maxValue;
#vec($lVec, $relRegisterNum+0, ($fieldSize*8)) = $bin1Dec;
#vec($lVec, $relRegisterNum+1, ($fieldSize*8)) = $bin2Dec;
#vec($lVec, $relRegisterNum+2, ($fieldSize*8)) = $compANDDec;
#vec($lVec, $relRegisterNum+3, ($fieldSize*8)) = $count++;
}
if ( $memory )
{
$symArray .= $lVec;
} else {
use bytes;
#print "ADDING ". &getSize(\$lVec) ." TO FH AT POS " . (($registerStart*$fieldSize)+$fieldSize) . ". NEXT ".(&getSizeBytes(\$lVec)+($registerStart*$fieldSize))."\n";
#seek MEM, (($registerStart*$fieldSize)+$fieldSize), 0 or die "COULD NOT SEEK: $!";
print MEM $lVec;
}
}
if ( ! defined $symArray )
{
die "ERROR GENERATING ARRAY";
} else {
print "TOTAL $count\n";
}
if ( ! $memory )
{
close MEM;
}
return \$symArray;
}
exit;
sub saveArray
{
my $dat = $_[0];
print "#"x20 . "\n";
print "SAVING ARRAY\n";
die "NO DATA" if ( ! defined $dat );
die "NO DATA" if ( ! length($dat) );
print "SAVING " . &getSize($dat) . "\n";
print "#"x20 . "\n";
open DAT, ">$dataFile" or die "COULD NOT OPEN DAT FILE $dataFile: $!";
binmode DAT;
print DAT $$dat;
close DAT;
die "ERROR SAVING DAT" if ( ! -s $dataFile );
}
sub loadArray
{
print "#"x20 . "\n";
print "LOADING ARRAY\n";
print "#"x20 . "\n";
my $dat = '';
open DAT, "<$dataFile" or die "COULD NOT OPEN DATA FILE $dataFile: $!";
binmode DAT;
my $buffer;
while (
read(DAT, $buffer, 65536) and $dat .= $buffer
){};
close DAT;
die "ERROR LOADING ARRAY FROM FILE" if ( ! length($dat));
die "ERROR LOADING ARRAY FROM FILE" if ( ! defined $dat );
print "LOADED " . &getSize(\$dat) . "\n";
return \$dat;
}
sub printArray
{
print "#"x20 . "\n";
print "PRINTING ARRAY\n";
print "#"x20 . "\n";
my $data = $_[0];
my $blockSizeLen = length($maxValue);
my $celLen = $numberRegisters * $blockSizeLen;
my $bSize = "%0".$blockSizeLen."d";
my $tab;
if ( $printArrayToFile )
{
open TAB, ">$tableFile" or die "COULD NOT OPEN LOG FILE $tableFile: $!";
$tab = *TAB;
} else {
$tab = *STDOUT;
}
if ( ! $memory )
{
open MEM, "<$dataFile" or die "COULD NOT OPEN DATA FILE $dataFile: $!";
binmode MEM;
}
print $tab "_"x$blockSizeLen, "_|_";
my $sides = int((($numberRegisters*$blockSizeLen) + $numberRegisters - 1)/ 2) - $blockSizeLen;
print $tab map "_"x$sides . "_" . (sprintf($bSize, $_)) . "\_" . "_"x$sides .($numberRegisters % 2 ? "" : "_"). "_._", (0 .. $tableSize - 1);
print $tab "\n";
for my $registerNumber (0 .. $tableSize - 1)
{
printf $tab "$bSize | ", $registerNumber;
my $registerStart = $registerNumber * ( $tableSize * $numberRegisters );
my $registerEnd = $registerStart + ( ( $tableSize - 1 ) * $numberRegisters );
for my $colNumber (0 .. $tableSize - 1)
{
my $relColPos = $colNumber * $numberRegisters;
my $absColPos = $registerStart + $relColPos;
my $block = '';
if ( $memory )
{
for (my $nc = 0; $nc < $numberRegisters; $nc++)
{
print "ABS COL POS $absColPos+$nc = ".($absColPos+$nc)."\n";
$block .= vec($$data, $absColPos+$nc, ($fieldSize*8));
}
print "SIZE ".&getSize(\$block)."\n";
} else {
seek MEM, ($registerStart*$fieldSize), 0;
read MEM, $block, $cellSize, 0;
}
#printf $std " $dSize x $dSize = $dSize [REGISTER $dSize REGISTER START $dSize REGISTER END $dSize REL CELL POS $dSize ABS CELL POS $dSize]\n", 0, 0, 0, $registerNumber, $registerStart, $registerEnd, $relColPos, $absColPos if ( ($verbose > 3) || $logToFile);
for my $colCount (0 .. $numberRegisters - 1)
{
my $value;
if ( $memory )
{
#$value = vec($$data, $absColPos+$colCount, ($fieldSize*8));
$value = vec($block, $colCount, ($fieldSize*8));
} else {
#print "REGISTER $registerNumber START $registerStart REL COL POS $relColPos ABS COL POS $absColPos CELL POS ".($absColPos+$colCount)."\n";
$value = vec($block, $colCount, ($fieldSize*8));
}
printf $tab "$bSize ", $value;
}
print $tab ". ";
}
print $tab "\n";
}
close TAB if $printArrayToFile;
close MEM if ( ! $memory );
}
sub dec2bin
{
my $num = $_[0];
#print "D2B :: NUM $num\n";
my $bNum = unpack("B32", pack("N", $num));
#print "D2B :: BNUM $bNum\n";
$bNum = substr($bNum, -$bitSize);
#print "D2B :: BNUMF $bNum\n";
#$bNum =~ s/^0+(?=\d)//; # fix left numbers
return $bNum;
}
sub bin2dec
{
my $bNum = $_[0];
##print "B2D :: BNUM $bNum\n";
#my $bNum32 = substr("0"x$systemBinarity . $bNum, -$systemBinarity);
##print "B2D :: BNUM32 $bNum32\n";
#my $bNum32Pack = pack("B$systemBinarity", $bNum32);
##print "B2D :: BNUM32PACK $bNum32Pack\n";
##my $bNum32PackUnpack = unpack("N", $bNum32Pack);
#my $bNum32PackUnpack = unpack("N", $bNum32Pack);
##print "B2D :: BNUM32PACKUNPACK $bNum32PackUnpack\n";
#return $bNum32PackUnpack;
return unpack("N", pack("B$systemBinarity", substr("0"x$systemBinarity . $bNum, -$systemBinarity)));
}
sub rcBin
{
my $bNum = $_[0];
$bNum =~ tr/01/10/;
$bNum = reverse($bNum);
return $bNum;
}
sub getSize
{
my $var = $_[0];
my $bytes = &getSizeBytes($var);
my ($size, $unity) = &convertBytes($bytes);
return "$size $unity";
}
sub convertBytes
{
my $bytes = $_[0];
my $size;
my $unity;
my $kb = 1024;
my $mb = $kb * 1024;
my $gb = $mb * 1024;
if ( $bytes >= $gb )
{
$size = $bytes / $gb;
$unity = "Gb";
}
elsif ( $bytes >= $mb )
{
$size = $bytes / $mb;
$unity = "Mb";
}
elsif ( $bytes >= $kb )
{
$size = $bytes / $kb;
$unity = "Kb";
} else {
$size = $bytes;
$unity = "bytes";
}
if ( $unity ne 'bytes' )
{
$size = sprintf("%.2f", $size);
}
return ($size, $unity);
}
sub getSizeBytes
{
my $var = $_[0];
use bytes;
my $bytes;
if ( $memory )
{
$bytes = length($$var);
} else {
$bytes = -s $dataFile;
}
return $bytes;
}
sub eta
{
my $startT = $_[0];
my $currT = $_[1];
my $startC = $_[2];
my $currC = $_[3];
my $targetC = $_[4];
my $var = $_[5];
my $sizeCurr = &getSizeBytes($var);
my $elapsedT = $currT - $startT;
my $elapsedTstr = &convertSeconds($elapsedT);
my $elapsedC = $currC - $startC;
my $avgT = ( ! $elapsedT ? 1 : $elapsedT ) / ( ! $elapsedC ? 1 : $elapsedC );
my $avgTstr = &convertSeconds($avgT);
my $avgC = ( ! $elapsedC ? 1 : $elapsedC ) / ( ! $elapsedT ? 1 : $elapsedT );
my $leftC = $targetC - $currC;
my $leftT = $leftC * $avgT;
my $leftTstr = &convertSeconds($leftT);
my $sizeUnity = ( ! $sizeCurr ? 1 : $sizeCurr)/ ( ! $currC ? 1 : $currC );
my $sizeEnd = $sizeUnity * $targetC;
my ($sizeCurrNum , $sizeCurrUni ) = &convertBytes($sizeCurr);
my ($sizeEndNum , $sizeEndUni ) = &convertBytes($sizeEnd);
my ($sizeUnityNum, $sizeUnityUni) = &convertBytes($sizeUnity);
my $str = sprintf("CURR c:$currC :: TARGET c:$targetC ::".
" ELAPSED t:$elapsedTstr c:$elapsedC :: AVG t:%s s/c c:%.2f c/s :: LEFT t:%s c:$leftC ::".
" SIZE curr: $sizeCurrNum $sizeCurrUni final: $sizeEndNum $sizeEndUni [$sizeUnityNum $sizeUnityUni / unity]",
$avgTstr, $avgC, $leftTstr);
return $str;
}
sub convertSeconds
{
my $sec = $_[0];
my $cMin = 60;
my $cHour = $cMin * 60;
my $cDay = $cHour * 24;
my $secs;
my $mins;
my $hours;
my $days;
if ( $sec >= $cDay )
{
$days = int($sec / $cDay );
$sec -= $days * $cDay;
}
if ( $sec >= $cHour )
{
$hours = int($sec / $cHour );
$sec -= $hours * $cHour;
}
if ( $sec >= $cMin )
{
$mins = int($sec / $cMin );
$sec -= $mins * $cMin;
}
$sec = sprintf("%.2f", $sec);
my $str = ( $days ? "$days"."d " : '') . ( $hours ? "$hours"."h " : '') . ( $mins ? "$mins\" " : '') . ( $sec ? "$sec' " : '');
return $str;
}
1;
| 27.249057 | 262 | 0.548885 |
edd5111da16779bcbe3e9f893a0e6c15718107c5 | 947 | pm | Perl | lib/Business/EDI/CodeList/FacilityTypeDescriptionCode.pm | atz/Business-EDI | 27514f11f91bccda85b8b411074d3dddbc8b28b1 | [
"Artistic-1.0-cl8"
] | 1 | 2015-10-29T13:18:40.000Z | 2015-10-29T13:18:40.000Z | lib/Business/EDI/CodeList/FacilityTypeDescriptionCode.pm | atz/Business-EDI | 27514f11f91bccda85b8b411074d3dddbc8b28b1 | [
"Artistic-1.0-cl8"
] | null | null | null | lib/Business/EDI/CodeList/FacilityTypeDescriptionCode.pm | atz/Business-EDI | 27514f11f91bccda85b8b411074d3dddbc8b28b1 | [
"Artistic-1.0-cl8"
] | null | null | null | package Business::EDI::CodeList::FacilityTypeDescriptionCode;
use base 'Business::EDI::CodeList';
my $VERSION = 0.02;
sub list_number {9039;}
my $usage = 'I';
# 9039 Facility type description code [I]
# Desc: Code specifying the facility type.
# Repr: an..3
my %code_hash = (
'1' => [ 'Movie',
'Movie viewing is available.' ],
'2' => [ 'Telephone',
'Telephone service is available.' ],
'3' => [ 'Telex',
'Telex service is available.' ],
'4' => [ 'Audio programming',
'Audio programming is available.' ],
'5' => [ 'Television',
'Television sets are available.' ],
'6' => [ 'Reservation booking service',
'Reservation booking service is available.' ],
'7' => [ 'Duty free sales',
'Duty free sales are available.' ],
'8' => [ 'Smoking',
'The facility permits smoking.' ],
'9' => [ 'Non-smoking',
'The facility is non-smoking.' ],
);
sub get_codes { return \%code_hash; }
1;
| 27.057143 | 67 | 0.604013 |
ed7473e39abab349430535a9ed95bc3111f633b2 | 914 | pl | Perl | code_perl/test_array.pl | BioinformaticsTrainingXie/Novice2Expert4Bioinformatics | dd1987d5a993db02945e18f87d5e494c7f4e5fb3 | [
"CC0-1.0"
] | 1 | 2019-03-14T13:25:35.000Z | 2019-03-14T13:25:35.000Z | code_perl/test_array.pl | BioinformaticsTrainingXie/Novice2Expert4Bioinformatics | dd1987d5a993db02945e18f87d5e494c7f4e5fb3 | [
"CC0-1.0"
] | 3 | 2019-03-13T19:46:58.000Z | 2019-05-01T18:12:46.000Z | code_perl/test_array.pl | BioinformaticsTrainingXie/Novice2Expert4Bioinformatics | dd1987d5a993db02945e18f87d5e494c7f4e5fb3 | [
"CC0-1.0"
] | 3 | 2019-03-14T13:30:59.000Z | 2020-01-08T01:06:31.000Z | #!/usr/bin/perl -w
#use warnings;
use strict;
my @gene_expr = (1, 3, 10);
print "First element: ", $gene_expr[0], "\n";
#my $len = @gene_expr;
my $len = $#gene_expr + 1;
print "Old_array: @gene_expr\n";
print "Length of array: ", $len, "\n";
my $new_gene_expr = 20;
push @gene_expr, $new_gene_expr;
$len = $#gene_expr + 1;
print "New_array: @gene_expr\n";
print "Length of array: ", $len, "\n";
my @new_gene_expr_array = (2, 5);
push @gene_expr, @new_gene_expr_array;
$len = scalar @gene_expr;
print "New_array: @gene_expr\n";
print "Length of array: ", $len, "\n";
my $last_ele = pop @gene_expr;
print "Last element is: $last_ele\n";
print "New_array after poping: @gene_expr\n";
my $first_ele = shift @gene_expr;
print "First element is: $first_ele\n";
print "New_array after shifting: @gene_expr\n";
my $gene_expr1 = 100;
unshift @gene_expr, $gene_expr1;
print "New_array after unshifting: @gene_expr\n";
| 26.114286 | 49 | 0.683807 |
ed9662c3041c232d842f2391dcd0a6d90b807705 | 86 | t | Perl | Dotter.t | rgeorgiev583/dotter | 1210eb809c9a32b65852d8952963ac74683670a7 | [
"MIT"
] | null | null | null | Dotter.t | rgeorgiev583/dotter | 1210eb809c9a32b65852d8952963ac74683670a7 | [
"MIT"
] | null | null | null | Dotter.t | rgeorgiev583/dotter | 1210eb809c9a32b65852d8952963ac74683670a7 | [
"MIT"
] | null | null | null | #!/usr/bin/perl
use strict;
use warnings;
use Test::Simple tests => 0;
use Dotter;
| 9.555556 | 28 | 0.674419 |
ed855cc92a68ad1446a21627004fa872c5c36442 | 3,149 | pm | Perl | t/response/TestAPI/access2_24.pm | dreamhost/dpkg-ndn-perl-mod-perl | b22e73bc5bbff804cece98fd2b8138b14883efd3 | [
"Apache-2.0"
] | null | null | null | t/response/TestAPI/access2_24.pm | dreamhost/dpkg-ndn-perl-mod-perl | b22e73bc5bbff804cece98fd2b8138b14883efd3 | [
"Apache-2.0"
] | null | null | null | t/response/TestAPI/access2_24.pm | dreamhost/dpkg-ndn-perl-mod-perl | b22e73bc5bbff804cece98fd2b8138b14883efd3 | [
"Apache-2.0"
] | null | null | null | package TestAPI::access2_24;
# testing $r->requires
# in the POST test it returns:
#
# [
# {
# 'method_mask' => -1,
# 'requirement' => 'user goo bar'
# },
# {
# 'method_mask' => -1,
# 'requirement' => 'group bar tar'
# }
# {
# 'method_mask' => 4,
# 'requirement' => 'valid-user'
# }
# ];
#
# otherwise it returns the same, sans the 'valid-user' entry
#
# also test:
# - $r->some_auth_required when it's required
# - $r->satisfies when Satisfy is set
use strict;
use warnings FATAL => 'all';
use Apache2::Access ();
use Apache2::RequestRec ();
use Apache::TestTrace;
use Apache2::Const -compile => qw(OK HTTP_UNAUTHORIZED SERVER_ERROR
AUTHZ_GRANTED AUTHZ_DENIED M_POST :satisfy
AUTHZ_DENIED_NO_USER);
my $users = "goo bar";
my $groups = "xar tar";
my %users = (
goo => "goopass",
bar => "barpass",
);
sub authz_handler {
my $self = shift;
my $r = shift;
my $requires = shift;
if (!$r->user) {
return Apache2::Const::AUTHZ_DENIED_NO_USER;
}
return Apache2::Const::SERVER_ERROR unless
$requires eq $users or $requires eq $groups;
my @require_args = split(/\s+/, $requires);
if (grep {$_ eq $r->user} @require_args) {
return Apache2::Const::AUTHZ_GRANTED;
}
return Apache2::Const::AUTHZ_DENIED;
}
sub authn_handler {
my $self = shift;
my $r = shift;
die '$r->some_auth_required failed' unless $r->some_auth_required;
my $satisfies = $r->satisfies;
die "wanted satisfies=" . Apache2::Const::SATISFY_ALL . ", got $satisfies"
unless $r->satisfies() == Apache2::Const::SATISFY_ALL;
my ($rc, $sent_pw) = $r->get_basic_auth_pw;
return $rc if $rc != Apache2::Const::OK;
if ($r->method_number == Apache2::Const::M_POST) {
return Apache2::Const::OK;
}
my $user = $r->user;
my $pass = $users{$user} || '';
unless (defined $pass and $sent_pw eq $pass) {
$r->note_basic_auth_failure;
return Apache2::Const::HTTP_UNAUTHORIZED;
}
Apache2::Const::OK;
}
1;
__DATA__
<NoAutoConfig>
<IfModule mod_version.c>
<IfVersion > 2.4.1>
PerlAddAuthzProvider my-user TestAPI::access2_24->authz_handler
PerlAddAuthzProvider my-group TestAPI::access2_24->authz_handler
<Location /TestAPI__access2>
PerlAuthenHandler TestAPI::access2_24->authn_handler
PerlResponseHandler Apache::TestHandler::ok1
SetHandler modperl
<IfModule @ACCESS_MODULE@>
# needed to test $r->satisfies
Allow from All
</IfModule>
AuthType Basic
AuthName "Access"
Require my-user goo bar
Require my-group xar tar
<Limit POST>
Require valid-user
</Limit>
Satisfy All
<IfModule @AUTH_MODULE@>
# htpasswd -mbc auth-users goo foo
# htpasswd -mb auth-users bar mar
# using md5 password so it'll work on win32 too
AuthUserFile @DocumentRoot@/api/auth-users
# group: user1 user2 ...
AuthGroupFile @DocumentRoot@/api/auth-groups
</IfModule>
</Location>
</IfVersion>
</IfModule>
</NoAutoConfig>
| 24.038168 | 78 | 0.618292 |
73d0655c6ff3bf5d72e2222d1706f71bb20e69d7 | 16,656 | pm | Perl | DocDB/cgi/EventInstructions.pm | brianv0/DocDB | 4e5b406f9385862fb8f236fed1ecfa04c6ae663c | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 19 | 2016-03-10T14:28:38.000Z | 2022-03-14T03:27:16.000Z | DocDB/cgi/EventInstructions.pm | brianv0/DocDB | 4e5b406f9385862fb8f236fed1ecfa04c6ae663c | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 68 | 2016-01-20T16:35:06.000Z | 2021-12-29T15:24:31.000Z | DocDB/cgi/EventInstructions.pm | brianv0/DocDB | 4e5b406f9385862fb8f236fed1ecfa04c6ae663c | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 13 | 2015-02-08T02:19:54.000Z | 2022-02-18T12:05:47.000Z | # Name: $RCSfile$
# Description: The instructions for the event organizer and calendar in DocDB.
# This is mostly HTML, but making it a script allows us to eliminate
# parts of it that we don't want and get it following everyone's
# style, and allows groups to add to it with ProjectMessages.
#
# Revision: $Revision$
# Modified: $Author$ on $Date$
#
# Author: Eric Vaandering ([email protected])
# Copyright 2001-2013 Eric Vaandering, Lynn Garren, Adam Bryant
# This file is part of DocDB.
# DocDB is free software; you can redistribute it and/or modify
# it under the terms of version 2 of the GNU General Public License
# as published by the Free Software Foundation.
# DocDB is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with DocDB; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
sub EventInstructionsSidebar {
print <<TOC;
<h2>Contents</h2>
<ul>
<li><a href="#intro">Introduction</a></li>
<li><a href="#calendar">Calendar</a>
<ul>
<li><a href="#day">Daily view</a></li>
<li><a href="#month">Monthly view</a></li>
<li><a href="#year">Yearly view</a></li>
<li><a href="#upcoming">Upcoming events</a></li>
</ul></li>
<li><a href="#create">Creating a New Event</a>
<ul>
<li><a href="#eventinfo">Event information</a></li>
<li><a href="#sessions">Creating sessions</a></li>
</ul></li>
<li><a href="#talks">Managing Talks in Sessions</a>
<ul>
<li><a href="#basicinfo">Basic information</a></li>
<li><a href="#order">Ordering talks</a></li>
<li><a href="#confirm">Confirming documents</a></li>
<li><a href="#hints">Giving hints about talks</a></li>
</ul></li>
<li><a href="#modify">Modifying an Event</a></li>
TOC
if ($Preferences{Components}{iCal}) {
print "<li><a href=\"#ical\">Exporting events to iCal</a></li>\n";
}
print <<TOC;
<li><a href="#matching">Matching agenda with documents</a>
<ul>
<li><a href="#userentry">User uploads</a></li>
<li><a href="#hinting">DocDB guesses</a></li>
<li><a href="#confirm2">Moderator confirms</a></li>
</ul></li>
</ul>
TOC
}
sub EventInstructionsBody {
print <<HTML;
<a name="intro"></a>
<h1>Introduction</h1>
<p>
The event organizer and calendar system provides the ability to set up events
with arbitrary numbers of sessions and breaks. Within each session, a moderator
can set up an arbitrary number of talks
and small breaks, discussion sessions, etc. Each
talk has a running time and each session has a starting time. This creates a
time ordered agenda. Anything from an afternoon video conference to conferences
with parallel and plenary sessions can be organized with the organizer.
The calendar provides an easy way to see which events are scheduled.
</p>
<p>
These instructions refer to a "moderator" which is any user who is
authorized to organize an event. Several people can collaborate to organize an
event, but when changes collide or appear to collide, only the first is taken.
</p>
<a name="calendar"></a>
<h1>Using the Calendar</h1>
<p>
DocDB supplies a calendar which shows upcoming and past events.
The calendar also allows you easily create new events.
There are four <q>views</q> which the calendar supplies; the first view you
will likely see is the month view.
</p>
<a name="day"></a>
<h2>Daily view</h2>
<p>The daily view shows a detailed list of a day's events. Events with no
sessions are shown first, followed by the various sessions taking place on that
day. Start and end times as well as locations and URLs (if any) are also shown.
Click on the link for the event to see the relevant agenda. At the
top of the page are buttons that can be used to schedule events for that day.
You can also click on the dates at the top of the page to view the next or
previous days, or to switch to the monthly or yearly views.</p>
<a name="month"></a>
<h2>Monthly view</h2>
<p>The monthly view shows a whole month and an abbreviated list of the
events on each day. Start times for events that have them are shown. If you
move your mouse over the event link, you will see more information. Click on the
links to see the agendas. At the top-left of each day is a link to the daily
view for that date. Click on the plus sign at the top-right to add a new event
on that date. You can also click on the month names at the top of the page to
view the next or previous month or click on the year to switch to the yearly
view.</p>
<p>If you are viewing the current month, the table of upcoming events is also
shown.</p>
<a name="year"></a>
<h2>Yearly view</h2>
<p>The yearly view shows the calendar for a whole year. The linked dates are
days with events; click on a link to see the daily view for that day. Click on
the name of a month to see the monthly view for that month. You can also click
on the years at the top of the page to view the next or previous year.</p>
<p>If you are viewing the current year, the table of upcoming events is also
shown.</p>
<a name="upcoming"></a>
<h2>Upcoming events</h2>
<p>This view shows events scheduled for the next 60 days. The view is similar
to the day view in that titles, locations, and URLs are all shown. Click on the
links to view the agendas.</p>
<a name="create"></a>
<h1>Creating a New Event</h1>
<p>DocDB is capable of scheduling three kinds of events. Events with no sessions
(perhaps a conference someone from the group is attending), events with just one
session (a small meeting) or events with more than one session (a multi-day
meeting, perhaps with plenary and parallel sessions).</p>
<p>Begin by clicking the correct button on the <a href="$ModifyHome">Change or
Create</a> page according to how many sessions your event will have. (You can
always add sessions to existing events, so don't worry if you change your mind
later.) For creating an event with no sessions, follow just the instructions for
<a href="#eventinfo">Event information</a>. For events with one session follow
the instructions for <a href="#eventinfo">Event information</a> (realizing that
some of the inputs described are not present), and then follow the instructions
for <a href="#talks">Managing Talks in Sessions</a>. For meetings with multiple
sessions, follow all these instructions.</p>
<a name="eventinfo"></a>
<h2>Entering event information</h2>
<p>A list of the groups of events are shown; you must select one. You must also
provide a title, or short description, and start and end dates for the event.
A long description of the event, a location, and a URL (external homepage) are
all optional, but if they exist, you should supply them. The "Show All Talks"
selection controls what the user sees when viewing a event. In the event view,
either all the sessions for an event with all their talks can be shown or just
links to the various sessions. This should probably be checked for events with
just a few dozen talks, but left unchecked for larger events.</p>
<p>The boxes labeled Event Preamble and Epilogue provide a space for text which
will be placed at the top and bottom respectively of the display of the
event. A welcome message or instructions can be placed here.</p>
<p>Finally, the View and Modify selections are used to control which groups may
view and modify the agenda for the events. The documents (talks) themselves
rely on their own security, not this setting.</p>
<p>The same form is used for modifying event information.</p>
<a name="sessions"></a>
<h2>Creating Sessions</h2>
<p>On the same form used for creating or modifying an event, the moderator is
able to set up one or more sessions in the event. If there are not enough spaces
for all the needed sessions, don't worry; blank slots will be added after
"Submit" is pressed.</p>
<p>The order of these sessions is
displayed and can be changed by entering new numbers in the Order column.
Decimal numbers are OK, so entering "1.5" will place the session between those
currently numbered "1" and "2."</p>
<p>Sessions may be designated as "Breaks" which cannot have talks associated with
them. Breaks can be used for entering meals or other activities.</p>
<p>Existing sessions can be deleted by checking the delete box.</p>
<p>For each session or break, a location (such as a room number) and starting
time should be entered. A session title should be entered and a longer
description of the session (such as an explanation of the topics covered) may
also be entered.</p>
<p>Once at least one session has been added to the event, talks can be
associated with the sessions.</p>
<a name="talks"></a>
<h1>Adding and Modifying Talks in a Session</h1>
<p>To create or modify slots for talks in a session, either click on the "Modify
Session" button on the "Display Session" page or the "Modify Session Agenda"
link on the "Modify Event" page. For events with a single session, you will
modify event and talk information on the same page. The moderator may add as
many talks or breaks as needed (blank slots are created at the bottom after the
submit button is pushed). Breaks can be announcements, discussions, or coffee
breaks (any activity which won't have a document attached to it) during a
session.</p>
<p>To create an agenda, the moderator should enter as much information as needed
about each talk or break. The fields are described below.</p>
<a name="basicinfo"></a>
<h2>Entering basic talk information</h2>
<p>For each talk, at least a suggested title and time (length) should be entered.
A note on each talk can also be entered, but these are only visible when
clicking on the "Note" link in the event or session displays.</p>
<a name="order"></a>
<h2>Ordering the talks</h2>
<p>On the far left is the document order within the session. To reorder talks
within the session, just input new numbers and press "Submit." Decimal numbers
are allowed, so entering "1.5" will place that talk between the talks currently
numbered "1" and "2."</p>
<a name="confirm"></a>
<h2>Specifying, deleting, and confirming documents</h2>
<p>On the form there are places to enter the document number of a talk if the
moderator already knows it, to confirm a suggestion by the DocDB, and to delete
the entry for a talk entirely. (This will NOT delete the document from the
database, just the entry for the event.)</p>
<p>A confirmed talk is one where the relationship between agenda entry and
document has been verified by a human, not guessed at by DocDB as explained
below. Unconfirmed talks are shown in <i>italics</i> type.</p>
<p>By checking the "Reserve" box when creating creating or updating an agenda,
the moderator can create new documents with the title, authors, and topics chosen.
Then, the author can upload document by updating this initial document. If you
choose to do this, make sure the users understand that they are supposed
to update rather than create new documents.</p>
<p>If document numbers are entered manually, the "Confirm" box(es) must also be
checked, or DocDB will guess its own numbers instead.</p>
<a name="hints"></a>
<h2>Giving hints about the talks</h2>
<p>Finally, the moderator may enter the suggested authors and topics for the talks
to be given. This has two purposes. First, before documents are entered into
the DocDB, attendees can more clearly see what the preliminary agenda is.
Secondly, this assists DocDB in finding the correct matches as described below.
</p>
<a name="modify"></a>
<h1>Modifying an Event</h1>
<p>From the <a href="$ModifyHome">Create or Change</a> page, follow the link to
modify an existing event. Then select the event you wish to modify. You will see
the same page you used to create the event. If you are a moderator, you will
also see buttons to modify events or sessions when you view those events or
sessions.</p>
HTML
if ($Preferences{Components}{iCal}) {
print "<a name=\"ical\"></a><h1>Exporting events to iCal</h1>\n";
print "<p>Wherever you see the iCal icon <img src=\"$ImgURLPath/ical_small.png\" alt=\"iCal icon\" />,
you can click and get a list of events suitable to import into many calendar programs. (The iCal format
is standardized.) The events listed are those you are allowed to view meeting the criteria listed.
You can get a list of all the events, those one particular topic, in a group, etc.</p>
<p>
If you want to use the URL the icon links to as an automatically updated feed for your calendar program,
you can do that too,
but you will need to use the URL from the public DocDB installation since your calendar program
will not be able to reach secure DocDB pages. This, of course, requires events are created so that
the agendas are be publicly visible.\n";
}
print <<HTML;
<a name="matching"></a>
<h1>How DocDB Matches Agenda Entries with Documents</h1>
<p>In addition to the moderator associating or reserving talks as described
above, there are two other ways documents are matched with agenda entries. The
first way is for the user themselves match the documents. The second way is to
let DocDB guess.</p>
<p>A suggested course of action for the moderator is to first encourage users to
match their talks as described below. Then the moderator confirms its correct
guesses, and then inputs numbers manually to correct DocDB's incorrect guesses.
DocDB will not assign documents confirmed for another agenda entry to a second
entry, so confirming documents and letting it guess again may find correct
matches.</p>
<a name="userentry"></a>
<h2>User selects the talk</h2>
<p>When a user presses the button at the top of a session or event display that
says "Upload a document," they will see a document entry form with one small
addition: a menu to select his or her talk from the list of talks for that event
or session that have not yet been entered. When the user selects his or her talk
from this list, it is entered into the agenda as a confirmed talk, just as if
the moderator had followed the instructions below.</p>
<a name="hinting"></a>
<h2>DocDB selects based on hints</h2>
<p>For entries without a confirmed document, the DocDB will try to figure out
which agenda entry matches which document. To do this, the DocDB constructs a
list of documents which might match the entries in the agenda. It then compares
each of these documents against the items in the agenda and picks what it thinks
is the best document among them. This document becomes an unconfirmed match with
the entry in the agenda. If it guesses right, confirm the match by clicking the
confirm box.</p>
<p>The list of documents to check against comes from two sources. First
documents with modification times in a time window around the event dates are
considered. Second, documents associated with the event are considered.</p>
<p>
Documents are matched with the agenda entries using a scoring system that takes
into account several criteria:
<ul>
<li>Whether the document is associated with the event</li>
<li>If the document's topic(s) match those in the agenda</li>
<li>If the document's author(s) match those in the agenda</li>
<li>How well the title of the document matches the suggested title in the
agenda</li>
</ul></p>
<p>Points are assigned to documents for each of these criteria where the
document matches the agenda entry. For each agenda entry/document pair, a score
is calculated. If the score is high enough, that document is entered as an
unconfirmed match. When documents are confirmed, they are removed from
consideration, which may change which assignments DocDB makes.</p>
<p>The precise algorithm used in choosing the best match can be determined by
looking at the DocDB code. </p>
<a name="confirm2"></a>
<h2>Moderator corrects and/or confirms</h2>
<p>As explained above, the final step in the process is for the moderator to
either confirm DocDB's correct guesses or manually enter the correct document
number and check confirm. If a very good match for a suggested document is
found, a button to confirm the match will appear in the agenda. In all cases,
clicking on "Note" in the agenda will pop up a window that will list all
possible matches, from best to worst. Click the relevant button to confirm the
match.</p>
<p> For very small events (just a few talks) moderators may wish to not use
hints at all and just manually enter the talks.</p>
HTML
}
1;
| 44.180371 | 107 | 0.745977 |
ed87cf6d7707d03beab8ae4f6a4eb3b64548adb4 | 7,526 | pm | Perl | src/perl/stv/lib/PrefVote/STV/Round.pm | ikluft/prefvote | afa7ef3a568a839d81f5432975a09926ea66df71 | [
"Apache-2.0"
] | null | null | null | src/perl/stv/lib/PrefVote/STV/Round.pm | ikluft/prefvote | afa7ef3a568a839d81f5432975a09926ea66df71 | [
"Apache-2.0"
] | null | null | null | src/perl/stv/lib/PrefVote/STV/Round.pm | ikluft/prefvote | afa7ef3a568a839d81f5432975a09926ea66df71 | [
"Apache-2.0"
] | null | null | null | # PrefVote::STV::Round
# ABSTRACT: internal voting-round structure used by PrefVote::STV
# derived from Vote::STV by Ian Kluft
# Copyright (c) 1998-2022 by Ian Kluft
# Open Source license: Apache License 2.0 https://www.apache.org/licenses/LICENSE-2.0
# pragmas to silence some warnings from Perl::Critic
## no critic (Modules::RequireExplicitPackage)
# 'use strict' and 'use warnings' included here
# This solves a catch-22 where parts of Perl::Critic want both package and use-strict to be first
use Modern::Perl qw(2013); # require 5.16.0 or later
## use critic (Modules::RequireExplicitPackage)
#
# STV voting round class
#
package PrefVote::STV::Round;
use autodie;
use Readonly;
use Set::Tiny qw(set);
use PrefVote::Core;
use PrefVote::STV::Tally;
# class definitions
use Moo;
use MooX::TypeTiny;
use MooX::HandlesVia;
use Types::Standard qw(ArrayRef HashRef InstanceOf Map);
use Types::Common::String qw(NonEmptySimpleStr);
extends 'PrefVote::Core::Round';
use PrefVote::Core::Float qw(float_internal PVPositiveOrZeroNum);
# constants
Readonly::Hash my %blackbox_spec => (
votes_used => [qw(fp)],
quota => [qw(fp)],
tally => [qw(hash PrefVote::STV::Tally)],
);
PrefVote::Core::TestSpec->register_blackbox_spec(__PACKAGE__, spec => \%blackbox_spec,
parent => 'PrefVote::Core::Round');
# count of votes used/consumed in counting so far
has votes_used => (
is => 'rw',
isa => PVPositiveOrZeroNum,
default => 0,
);
around votes_used => sub {
my ($orig, $self, $param) = @_;
return $orig->($self, (defined $param ? (float_internal($param)) : ()));
};
# STV quota is the threshold to win the round as a function of seats available and candidates running
has quota => (
is => 'rw',
isa => PVPositiveOrZeroNum,
default => 0,
);
around quota => sub {
my ($orig, $self, $param) = @_;
return $orig->($self, (defined $param ? (float_internal($param)) : ()));
};
# candidate vote counts in the current round
has tally => (
is => 'rw',
isa => Map[NonEmptySimpleStr, InstanceOf["PrefVote::STV::Tally"]],
default => sub { return {} },
handles_via => 'Hash',
handles => {
tally_exists => 'exists',
tally_get => 'get',
tally_keys => 'keys',
tally_set => 'set',
},
);
# set candidate tallies
# candidates must be provided by new() for first round, later rounds this populates it from previous round
sub init_candidate_tally
{
my $self = shift;
# initialization for parent class PrefVote::Core::Round
$self->init_round_candidates();
# initialize candidate tally structures
foreach my $cand_name (@{$self->{candidates}}) {
$self->tally_set($cand_name, PrefVote::STV::Tally->new(name => $cand_name));
}
$self->debug_print("init_candidate_tally: tally structs ".join(" ", $self->tally_keys())."\n");
return;
}
# add to total votes found/used in the round
# this counts fractional votes for transfers above a winning candidate's quota
sub add_votes_used
{
my $self = shift;
my $votes = shift;
PVPositiveOrZeroNum->validate($votes);
if ($votes < 0) {
PrefVote::STV::Round::NegativeIncrementException->throw({classname => __PACKAGE__,
attribute => 'votes_used',
description => "negative incrememnt is invalid",
});
}
my $votes_used = $self->votes_used() + $votes;
$self->votes_used(float_internal($votes_used));
return $votes_used;
}
# sort the round's candidates list
# this is called after adding last item so we don't waste time sorting it more than once
sub sort_candidates
{
my ($self, $sort_fn) = @_;
if (not defined $sort_fn) {
# default sorting function is descending order by vote tally
# alternative sort functions are for testing (i.e. alphabetical sort allows testing without using votes)
my $tally_ref = $self->tally();
$sort_fn = sub {
# 1st/primary comparison: votes for candidate in descending order
my $votes0 = $tally_ref->{$_[0]}->votes();
my $votes1 = $tally_ref->{$_[1]}->votes();
if ($votes0 != $votes1) {
return $votes1 <=> $votes0;
}
# 2nd comparison: alphabetical (so ties in testing comparisons are consistent)
return $_[0] cmp $_[1];
};
} elsif (ref $sort_fn ne "CODE") {
PrefVote::STV::Round::BadSortingFnException->throw({classname => __PACKAGE__,
attribute => 'sort_fn',
description => "sorting function parameter is not a CODE reference: got ".(ref $sort_fn),
});
}
$self->candidates_sort_in_place($sort_fn);
$self->debug_print("sorted round candidate list = ".$self->candidates_join(" ")."\n");
return $self->candidates_all();
}
## no critic (Modules::ProhibitMultiplePackages)
#
# exception classes
#
package PrefVote::STV::Round::NegativeIncrementException;
use Moo;
use Types::Standard qw(Str);
extends 'PrefVote::Core::InternalDataException';
package PrefVote::STV::Round::BadSortingFnException;
use Moo;
use Types::Standard qw(Str);
extends 'PrefVote::Core::InternalDataException';
1;
__END__
# POD documentation
=encoding utf8
=head1 NAME
PrefVote::STV::Round - internal voting-round structure used by PrefVote::STV
=head1 SYNOPSIS
# from unit test code - not a full example
my @candidate_names = qw(ABNORMAL BORING CHAOTIC DYSFUNCTIONAL EVIL FACTIOUS);
my $stv_round_ref = PrefVote::STV::Round->new(number => 1, candidates => @candidate_names);
$stv_round_ref->init_candidate_tally();
$stv_round_ref->add_votes_used(10);
$stv_round_ref->sort_candidates(sub{ return $_[1] cmp $_[0] });
=head1 DESCRIPTION
I<PrefVote::STV::Round> keeps the data for a voting round in L<PrefVote::STV>.
It is a subclass of L<PrefVote::Core::Round>, and therefore contains the data and methods of that class as well.
=head1 ATTRIBUTES
=over 1
=item votes_used
'votes_used' is a floating point number of the total votes consumed by counting.
During the count it's a running total. Afterward, it's the final total.
=item quota
'Quota' is a floating point number with the threshold of votes required to win the round, expressed as a fraction.
It is a function of seats available and candidates running.
=item tally
'Tally' is a hash keyed by candidate names which each contain that candidate's tally in the round's count,
as a L<PrefVote::STV::Tally> object.
=back
=head1 METHODS
=over 1
=item init_candidate_tally ()
This must be called once for each I<PrefVote::STV::Round> object to initialize the candidates and set votes to zero.
It calls init_round_candidates() in L<PrefVote::Core::Round> to initialize the superclass' data.
=item add_votes_used ( int votes )
This adds to the total votes used, or consumed, in the current round's vote-counting.
=item sort_candidates ( [coderef sort_fn] )
This should be called once after votes have been counted, to sort the candidates in result order.
It takes an optional code reference parameter as a sorting function, which can be used for testing purposes
if full vote totals have not been added to the object.
By default the sort order is descending by vote totals in the current round.
=back
=head1 SEE ALSO
L<PrefVote:STV>
L<https://github.com/ikluft/prefvote>
=head1 BUGS AND LIMITATIONS
Please report bugs via GitHub at L<https://github.com/ikluft/prefvote/issues>
Patches and enhancements may be submitted via a pull request at L<https://github.com/ikluft/prefvote/pulls>
=cut
| 30.593496 | 116 | 0.692665 |
ed86333d4929a6e31893949880ece2754a88ee22 | 4,657 | pl | Perl | tests/tests/test072.pl | trealla-prolog/trealla | 0078db8af35bbb1b80ee7be3e1aea9bf73bf3f4e | [
"MIT"
] | 7 | 2022-02-26T20:15:27.000Z | 2022-03-30T21:36:08.000Z | tests/tests/test072.pl | trealla-prolog/trealla | 0078db8af35bbb1b80ee7be3e1aea9bf73bf3f4e | [
"MIT"
] | 78 | 2022-02-05T02:21:01.000Z | 2022-03-31T23:22:24.000Z | tests/tests/test072.pl | trealla-prolog/trealla | 0078db8af35bbb1b80ee7be3e1aea9bf73bf3f4e | [
"MIT"
] | 1 | 2022-03-06T03:25:39.000Z | 2022-03-06T03:25:39.000Z | :- initialization(main(10)).
:- use_module(library(apply)).
:- use_module(library(lists)).
main(Size) :-
setof(Total, M^Freq^Perm^square(Size, M, Total, Freq, Perm), Totals),
last(Totals, Max),
square(Size, Board, Max, _, Permutation),
writeq([Permutation, Board, Max]), nl.
var_matrix(Size, M) :-
repeat(Size, Size, RowLengths),
maplist(var_list, RowLengths, M).
repeat(X, 1, [X]) :-
!.
repeat(X, N, [X|R]) :-
NewN is N - 1,
repeat(X, NewN, R).
var_list(N, L) :-
length(L, N).
transpose(M, T) :-
[H|_] = M,
length(H, NCols),
from_to(1, NCols, L),
maplist(col(M), L, T).
col(Matrix, N, Column) :-
maplist(nth1(N), Matrix, Column).
list_permute([], _, []).
list_permute([P1|Rest], L, [H|T]) :-
nth1(P1, L, H),
list_permute(Rest, L, T).
snd((_, X), X).
retain_var(_, [], []).
retain_var(V, [H|T], [H|L]) :-
H == V,
retain_var(V, T, L).
retain_var(V, [H|T], L) :-
H \== V,
retain_var(V, T, L).
count_var(VarList, Var, Num) :-
retain_var(Var, VarList, List),
length(List, Num).
total(Ints, Total) :-
total(Ints, 0, Total).
total([], S, S).
total([(X, Y)|T], Acc, S) :-
NewAcc is Acc + X*Y,
total(T, NewAcc, S).
zip([], _, []) :-
!.
zip(_, [], []) :-
!.
zip([H1|T1], [H2|T2], [(H1, H2)|T]) :-
zip(T1, T2, T).
from_to(M, N, L) :-
( var(L)
; is_list(L)
),
integer(M),
integer(N),
M =< N,
from_to_acc(M, [N], L),
!.
from_to(H, N, [H|T]) :-
last([H|T], N),
!,
H =< N.
from_to_acc(H, [H|T], [H|T]).
from_to_acc(M, [H|T], L) :-
NewHead is H - 1,
!,
from_to_acc(M, [NewHead, H|T], L).
eval_matrix(Matrix, FreqSorted) :-
flatten(Matrix, Entries),
setof(E, member(E, Entries), Set),
maplist(count_var(Entries), Set, Multiplicities),
zip(Multiplicities, Set, Frequencies),
sort(Frequencies, FreqSorted),
maplist(snd, FreqSorted, VarsSorted),
length(VarsSorted, NVars),
from_to(1, NVars, VarsSorted).
distinct([_]).
distinct([H|T]) :-
notin(H, T),
distinct(T).
notin(_, []).
notin(E, [H|T]) :-
E \== H,
notin(E, T).
next_partition([(2, 1)|T], [(1, 2)|T]).
next_partition([(2, AlphaK)|T], [(1, 2), (2, NewAlphaK)|T]) :-
AlphaK > 1,
NewAlphaK is AlphaK - 1.
next_partition([(K, 1)|T], [(1, 1), (NewK, 1)|T]) :-
K > 2,
NewK is K - 1.
next_partition([(K, AlphaK)|T], [(1, 1), (NewK, 1), (K, NewAlphaK)|T]) :-
K > 2,
AlphaK > 1,
NewK is K - 1,
NewAlphaK is AlphaK - 1.
next_partition([(1, Alpha1), (2, 1)|T], [(1, NewAlpha)|T]) :-
NewAlpha is Alpha1 + 2.
next_partition([(1, Alpha1), (2, Alpha2)|T], [(1, NewAlpha1), (2, NewAlpha2)|T]) :-
Alpha2 > 1,
NewAlpha1 is Alpha1 + 2,
NewAlpha2 is Alpha2 - 1.
next_partition([(1, Alpha1), (L, 1)|T], [(Rest, 1), (NewL, Ratio)|T]) :-
L > 2,
NewL is L - 1,
Rest is (Alpha1 + L) mod NewL,
Rest > 0,
Ratio is (Alpha1 + L) // NewL.
next_partition([(1, Alpha1), (L, 1)|T], [(NewL, Ratio)|T]) :-
L > 2,
NewL is L - 1,
Rest is (Alpha1 + L) mod NewL,
Rest =:= 0,
Ratio is (Alpha1 + L) // NewL.
next_partition([(1, Alpha1), (L, AlphaL)|T], [(Rest, 1), (NewL, Ratio), (L, NewAlphaL)|T]) :-
L > 2,
AlphaL > 1,
NewL is L - 1,
Rest is (Alpha1 + L) mod NewL,
Rest > 0,
Ratio is (Alpha1 + L) // NewL,
NewAlphaL is AlphaL - 1.
next_partition([(1, Alpha1), (L, AlphaL)|T], [(NewL, Ratio), (L, NewAlphaL)|T]) :-
L > 2,
AlphaL > 1,
NewL is L - 1,
Rest is (Alpha1 + L) mod NewL,
Rest =:= 0,
Ratio is (Alpha1 + L) // NewL,
NewAlphaL is AlphaL - 1.
ad_partition(N, [(K, AlphaK)|T]) :-
generator([(N, 1)], [(K, AlphaK)|T]),
K > 1.
generator(From, From).
generator(Last, P) :-
next_partition(Last, New),
generator(New, P).
splitter(N, Type, S) :-
from_to(1, N, L),
splitter(L, Type, [], S).
splitter([], [(_, 0)], Acc, S) :-
reverse(Acc, S),
!.
splitter(L, [(_, 0)|T], Acc, S) :-
splitter(L, T, Acc, S).
splitter(L, [(K, AlphaK)|T], Acc, S) :-
AlphaK > 0,
append(L1, L2, L),
length(L1, K),
NewAlphaK is AlphaK - 1,
splitter(L2, [(K, NewAlphaK)|T], [L1|Acc], S).
list_rotate([H|T], L) :-
append(T, [H], L).
rep_perm(N, Type, Perm) :-
splitter(N, Type, S),
maplist(list_rotate, S, R),
flatten(R, Perm).
square(Size, M, Total, Frequencies, Permutation) :-
var_matrix(Size, M),
ad_partition(Size, Partition),
rep_perm(Size, Partition, Permutation),
list_permute(Permutation, M, P),
transpose(P, M),
distinct(M),
eval_matrix(M, Frequencies),
total(Frequencies, Total).
| 23.760204 | 93 | 0.535323 |
ed4430815da79f9700268bf5ae10d6ac117f640a | 2,687 | t | Perl | t/roles/role_composite_exclusion.t | exodist/Moose | fadfec9e315aa33fb22c96982704c8c0d15e84c6 | [
"Artistic-1.0"
] | 94 | 2015-01-04T18:17:36.000Z | 2021-11-18T04:51:22.000Z | t/roles/role_composite_exclusion.t | moose-skaters/moose1 | 648aa02b38b313ca7697de7175f6ca540df16612 | [
"Artistic-1.0"
] | 82 | 2015-01-01T18:57:16.000Z | 2021-04-13T17:58:38.000Z | t/roles/role_composite_exclusion.t | moose-skaters/moose1 | 648aa02b38b313ca7697de7175f6ca540df16612 | [
"Artistic-1.0"
] | 69 | 2015-01-06T00:59:52.000Z | 2022-01-17T16:52:38.000Z | use strict;
use warnings;
use Test::More;
use Test::Fatal;
use Moose::Meta::Role::Application::RoleSummation;
use Moose::Meta::Role::Composite;
{
package Role::Foo;
use Moose::Role;
package Role::Bar;
use Moose::Role;
package Role::ExcludesFoo;
use Moose::Role;
excludes 'Role::Foo';
package Role::DoesExcludesFoo;
use Moose::Role;
with 'Role::ExcludesFoo';
package Role::DoesFoo;
use Moose::Role;
with 'Role::Foo';
}
ok(Role::ExcludesFoo->meta->excludes_role('Role::Foo'), '... got the right exclusions');
ok(Role::DoesExcludesFoo->meta->excludes_role('Role::Foo'), '... got the right exclusions');
# test simple exclusion
isnt( exception {
Moose::Meta::Role::Application::RoleSummation->new->apply(
Moose::Meta::Role::Composite->new(
roles => [
Role::Foo->meta,
Role::ExcludesFoo->meta,
]
)
);
}, undef, '... this fails as expected' );
# test no conflicts
{
my $c = Moose::Meta::Role::Composite->new(
roles => [
Role::Foo->meta,
Role::Bar->meta,
]
);
isa_ok($c, 'Moose::Meta::Role::Composite');
is($c->name, 'Role::Foo|Role::Bar', '... got the composite role name');
is( exception {
Moose::Meta::Role::Application::RoleSummation->new->apply($c);
}, undef, '... this lives as expected' );
}
# test no conflicts w/exclusion
{
my $c = Moose::Meta::Role::Composite->new(
roles => [
Role::Bar->meta,
Role::ExcludesFoo->meta,
]
);
isa_ok($c, 'Moose::Meta::Role::Composite');
is($c->name, 'Role::Bar|Role::ExcludesFoo', '... got the composite role name');
is( exception {
Moose::Meta::Role::Application::RoleSummation->new->apply($c);
}, undef, '... this lives as expected' );
is_deeply([$c->get_excluded_roles_list], ['Role::Foo'], '... has excluded roles');
}
# test conflict with an "inherited" exclusion
isnt( exception {
Moose::Meta::Role::Application::RoleSummation->new->apply(
Moose::Meta::Role::Composite->new(
roles => [
Role::Foo->meta,
Role::DoesExcludesFoo->meta,
]
)
);
}, undef, '... this fails as expected' );
# test conflict with an "inherited" exclusion of an "inherited" role
isnt( exception {
Moose::Meta::Role::Application::RoleSummation->new->apply(
Moose::Meta::Role::Composite->new(
roles => [
Role::DoesFoo->meta,
Role::DoesExcludesFoo->meta,
]
)
);
}, undef, '... this fails as expected' );
done_testing;
| 24.87963 | 92 | 0.566803 |
eda7921aac4fcd851bfe03e6919b608aed27f4a4 | 2,065 | t | Perl | t/002-cache.t | moose/Class-Method-Modifiers | 9f67c8f1cb1c2b437e5d64bb7d813174055615a0 | [
"Artistic-1.0"
] | 2 | 2017-04-01T14:11:14.000Z | 2018-10-17T15:27:44.000Z | t/002-cache.t | moose/Class-Method-Modifiers | 9f67c8f1cb1c2b437e5d64bb7d813174055615a0 | [
"Artistic-1.0"
] | 2 | 2015-05-21T17:11:39.000Z | 2017-01-16T06:03:01.000Z | t/002-cache.t | moose/Class-Method-Modifiers | 9f67c8f1cb1c2b437e5d64bb7d813174055615a0 | [
"Artistic-1.0"
] | 4 | 2015-05-21T13:03:30.000Z | 2020-08-13T16:03:02.000Z | use strict;
use warnings;
use Test::More 0.88;
use if $ENV{AUTHOR_TESTING}, 'Test::Warnings';
my $orig;
my $code = sub { die };
do {
package Class;
use Class::Method::Modifiers;
sub method {}
$orig = Class->can('method');
before method => $code;
};
is_deeply(\%Class::Method::Modifiers::MODIFIER_CACHE, {
Class => {
method => {
before => [$code],
after => [],
around => [],
orig => $orig,
wrapped => $orig,
},
},
});
my $code2 = sub { 1 + 1 };
do {
package Child;
BEGIN { our @ISA = 'Class' }
use Class::Method::Modifiers;
after method => $code2;
};
my $fake = $Class::Method::Modifiers::MODIFIER_CACHE{Child}{method}{wrapped};
is_deeply(\%Class::Method::Modifiers::MODIFIER_CACHE, {
Class => {
method => {
before => [$code],
after => [],
around => [],
orig => $orig,
wrapped => $orig,
},
},
Child => {
method => {
before => [],
after => [$code2],
around => [],
orig => undef,
wrapped => $fake,
},
},
});
my $around1 = sub { "around1" };
my $around2 = sub { "around2" };
do {
package Class;
use Class::Method::Modifiers;
around method => $around1;
around method => $around2;
};
# XXX: hard to test, we have no other way of getting at this coderef
my $wrapped = $Class::Method::Modifiers::MODIFIER_CACHE{Class}{method}{wrapped};
is_deeply(\%Class::Method::Modifiers::MODIFIER_CACHE, {
Class => {
method => {
around => [$around2, $around1],
before => [$code],
after => [],
orig => $orig,
wrapped => $wrapped,
},
},
Child => {
method => {
before => [],
after => [$code2],
around => [],
orig => undef,
wrapped => $fake,
},
},
});
done_testing;
| 20.445545 | 81 | 0.458596 |
ed17f6bcee21a4cae5b4e018ad89702bccf33647 | 375 | pl | Perl | libs/perllib/LoxBerry/testing/testlog_loglevelchange.pl | nufke/Loxberry | 58a7dc49bd6af9cbe44c431e05b46ad7bdaaa77d | [
"Apache-2.0"
] | 60 | 2016-07-14T13:19:30.000Z | 2022-03-13T11:46:39.000Z | libs/perllib/LoxBerry/testing/testlog_loglevelchange.pl | nufke/Loxberry | 58a7dc49bd6af9cbe44c431e05b46ad7bdaaa77d | [
"Apache-2.0"
] | 1,251 | 2016-07-06T12:41:21.000Z | 2022-03-30T13:53:04.000Z | libs/perllib/LoxBerry/testing/testlog_loglevelchange.pl | nufke/Loxberry | 58a7dc49bd6af9cbe44c431e05b46ad7bdaaa77d | [
"Apache-2.0"
] | 43 | 2016-07-15T05:56:32.000Z | 2021-12-23T16:01:20.000Z | #!/usr/bin/perl
use LoxBerry::Log;
print "Hallo\n";
# my $log = LoxBerry::Log->new ( logdir => "$lbslogdir", name => 'test', package => 'Test', loglevel => 3);
my $log = LoxBerry::Log->new (
filename => "/opt/loxberry/log/plugins/lbbackup/test.log",
name => 'test',
package => 'lbbackup',
stderr => 1,
addtime => 1,
);
while(1) {
LOGINF "Hallo";
sleep(10);
}
| 17.857143 | 107 | 0.592 |
ed91b53f203b8a2792da89fb27e2262f0653f63d | 4,274 | pm | Perl | cloud/azure/compute/virtualmachine/mode/memory.pm | centreon-lab/centreon-plugins | 68096c697a9e1baf89a712674a193d9a9321503c | [
"Apache-2.0"
] | null | null | null | cloud/azure/compute/virtualmachine/mode/memory.pm | centreon-lab/centreon-plugins | 68096c697a9e1baf89a712674a193d9a9321503c | [
"Apache-2.0"
] | null | null | null | cloud/azure/compute/virtualmachine/mode/memory.pm | centreon-lab/centreon-plugins | 68096c697a9e1baf89a712674a193d9a9321503c | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2022 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package cloud::azure::compute::virtualmachine::mode::memory;
use base qw(cloud::azure::custom::mode);
use strict;
use warnings;
sub get_metrics_mapping {
my ($self, %options) = @_;
my $metrics_mapping = {
'Available Memory Bytes' => {
'output' => 'Available Memory Bytes',
'label' => 'memory-available',
'nlabel' => 'memory.available.bytes',
'unit' => 'B'
}
};
return $metrics_mapping;
}
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options, force_new_perfdata => 1);
bless $self, $class;
$options{options}->add_options(arguments => {
'resource:s' => { name => 'resource' },
'resource-group:s' => { name => 'resource_group' }
});
return $self;
}
sub check_options {
my ($self, %options) = @_;
$self->SUPER::check_options(%options);
if (!defined($self->{option_results}->{resource}) || $self->{option_results}->{resource} eq '') {
$self->{output}->add_option_msg(short_msg => 'Need to specify either --resource <name> with --resource-group option or --resource <id>.');
$self->{output}->option_exit();
}
my $resource = $self->{option_results}->{resource};
my $resource_group = defined($self->{option_results}->{resource_group}) ? $self->{option_results}->{resource_group} : '';
if ($resource =~ /^\/subscriptions\/.*\/resourceGroups\/(.*)\/providers\/Microsoft\.Compute\/virtualMachines\/(.*)$/) {
$resource_group = $1;
$resource = $2;
}
$self->{az_resource} = $resource;
$self->{az_resource_group} = $resource_group;
$self->{az_resource_type} = 'virtualMachines';
$self->{az_resource_namespace} = 'Microsoft.Compute';
$self->{az_timeframe} = defined($self->{option_results}->{timeframe}) ? $self->{option_results}->{timeframe} : 900;
$self->{az_interval} = defined($self->{option_results}->{interval}) ? $self->{option_results}->{interval} : 'PT5M';
$self->{az_aggregations} = ['Average'];
if (defined($self->{option_results}->{aggregation})) {
$self->{az_aggregations} = [];
foreach my $stat (@{$self->{option_results}->{aggregation}}) {
if ($stat ne '') {
push @{$self->{az_aggregations}}, ucfirst(lc($stat));
}
}
}
foreach my $metric (keys %{$self->{metrics_mapping}}) {
next if (defined($self->{option_results}->{filter_metric}) && $self->{option_results}->{filter_metric} ne ''
&& $metric !~ /$self->{option_results}->{filter_metric}/);
push @{$self->{az_metrics}}, $metric;
}
}
1;
__END__
=head1 MODE
Check Azure MS Compute Virtual Machine available memory.
perl centreon_plugins.pl --plugin=cloud::azure::compute::virtualmachine::plugin --mode=memory \
--custommode='api' --resource='***' --resource-group='***' \
--subscription='***' --tenant='***' --client-id='***' --client-secret='**' --timeframe='900' \
--interval='PT1M' --aggregation='average' --warning-memory-available=1024: --verbose
Note that using a specific threshold syntax is required to trigger an alert when
the value drops below available memory "<value>:"
=over 8
=item B<--resource>
Set resource name or id (Required).
=item B<--resource-group>
Set resource group (Required if resource's name is used).
=item B<--warning-memory-available>
Warning threshold.
=item B<--critical-memory-available>
Critical threshold.
=back
=cut
| 32.625954 | 146 | 0.641554 |
edc028a28c1c67a4a6dac9be9d5807c6a285a461 | 636 | pm | Perl | auto-lib/Paws/DynamoDB/DescribeGlobalTableSettingsOutput.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | [
"Apache-2.0"
] | null | null | null | auto-lib/Paws/DynamoDB/DescribeGlobalTableSettingsOutput.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | [
"Apache-2.0"
] | null | null | null | auto-lib/Paws/DynamoDB/DescribeGlobalTableSettingsOutput.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | [
"Apache-2.0"
] | null | null | null |
package Paws::DynamoDB::DescribeGlobalTableSettingsOutput;
use Moose;
has GlobalTableName => (is => 'ro', isa => 'Str');
has ReplicaSettings => (is => 'ro', isa => 'ArrayRef[Paws::DynamoDB::ReplicaSettingsDescription]');
has _request_id => (is => 'ro', isa => 'Str');
### main pod documentation begin ###
=head1 NAME
Paws::DynamoDB::DescribeGlobalTableSettingsOutput
=head1 ATTRIBUTES
=head2 GlobalTableName => Str
The name of the global table.
=head2 ReplicaSettings => ArrayRef[L<Paws::DynamoDB::ReplicaSettingsDescription>]
The region specific settings for the global table.
=head2 _request_id => Str
=cut
1; | 19.272727 | 101 | 0.715409 |
edbae287753582e7993dcc33aa987f062fa1028b | 1,322 | pm | Perl | lib/Rapi/Demo/Chinook/DB/Result/Album.pm | manwar/Rapi-Demo-Chinook | 61b051cf4c16f13840fad0954d83d742490641b9 | [
"Artistic-1.0"
] | 1 | 2017-02-21T16:09:23.000Z | 2017-02-21T16:09:23.000Z | lib/Rapi/Demo/Chinook/DB/Result/Album.pm | manwar/Rapi-Demo-Chinook | 61b051cf4c16f13840fad0954d83d742490641b9 | [
"Artistic-1.0"
] | null | null | null | lib/Rapi/Demo/Chinook/DB/Result/Album.pm | manwar/Rapi-Demo-Chinook | 61b051cf4c16f13840fad0954d83d742490641b9 | [
"Artistic-1.0"
] | 1 | 2019-10-02T11:29:35.000Z | 2019-10-02T11:29:35.000Z | use utf8;
package Rapi::Demo::Chinook::DB::Result::Album;
# Created by DBIx::Class::Schema::Loader
# DO NOT MODIFY THE FIRST PART OF THIS FILE
use strict;
use warnings;
use Moose;
use MooseX::NonMoose;
use MooseX::MarkAsMethods autoclean => 1;
extends 'DBIx::Class::Core';
__PACKAGE__->load_components("InflateColumn::DateTime");
__PACKAGE__->table("Album");
__PACKAGE__->add_columns(
"albumid",
{ data_type => "integer", is_auto_increment => 1, is_nullable => 0 },
"title",
{ data_type => "nvarchar", is_nullable => 0, size => 160 },
"artistid",
{ data_type => "integer", is_foreign_key => 1, is_nullable => 0 },
);
__PACKAGE__->set_primary_key("albumid");
__PACKAGE__->belongs_to(
"artistid",
"Rapi::Demo::Chinook::DB::Result::Artist",
{ artistid => "artistid" },
{ is_deferrable => 0, on_delete => "NO ACTION", on_update => "NO ACTION" },
);
__PACKAGE__->has_many(
"tracks",
"Rapi::Demo::Chinook::DB::Result::Track",
{ "foreign.albumid" => "self.albumid" },
{ cascade_copy => 0, cascade_delete => 0 },
);
# Created by DBIx::Class::Schema::Loader v0.07036 @ 2013-09-12 15:36:29
# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:OJ1U992pTI/1TC7qsm8syA
# You can replace this text with custom code or comments, and it will be preserved on regeneration
__PACKAGE__->meta->make_immutable;
1;
| 28.73913 | 98 | 0.692133 |
ed6939563e0a2fb8f4172bea88ce286f72011b15 | 6,686 | pm | Perl | lib/Perl/Critic/Theme.pm | git-the-cpan/Perl-Critic | 7ae3fb01e11f96772ab9180d70fa9847cc5c3cf3 | [
"Artistic-1.0"
] | null | null | null | lib/Perl/Critic/Theme.pm | git-the-cpan/Perl-Critic | 7ae3fb01e11f96772ab9180d70fa9847cc5c3cf3 | [
"Artistic-1.0"
] | null | null | null | lib/Perl/Critic/Theme.pm | git-the-cpan/Perl-Critic | 7ae3fb01e11f96772ab9180d70fa9847cc5c3cf3 | [
"Artistic-1.0"
] | null | null | null | package Perl::Critic::Theme;
use 5.006001;
use strict;
use warnings;
use English qw(-no_match_vars);
use Readonly;
use Exporter 'import';
use List::MoreUtils qw(any);
use Perl::Critic::Utils qw{ :characters :data_conversion };
use Perl::Critic::Exception::Fatal::Internal qw{ &throw_internal };
use Perl::Critic::Exception::Configuration::Option::Global::ParameterValue
qw{ &throw_global_value };
#-----------------------------------------------------------------------------
our $VERSION = '1.126';
#-----------------------------------------------------------------------------
Readonly::Array our @EXPORT_OK => qw{
$RULE_INVALID_CHARACTER_REGEX
cook_rule
};
#-----------------------------------------------------------------------------
Readonly::Scalar our $RULE_INVALID_CHARACTER_REGEX =>
qr/ ( [^()\s\w\d+\-*&|!] ) /xms;
#-----------------------------------------------------------------------------
Readonly::Scalar my $CONFIG_KEY => 'theme';
#-----------------------------------------------------------------------------
sub new {
my ( $class, %args ) = @_;
my $self = bless {}, $class;
$self->_init( %args );
return $self;
}
#-----------------------------------------------------------------------------
sub _init {
my ($self, %args) = @_;
my $rule = $args{-rule} || $EMPTY;
if ( $rule =~ m/$RULE_INVALID_CHARACTER_REGEX/xms ) {
throw_global_value
option_name => $CONFIG_KEY,
option_value => $rule,
message_suffix => qq{contains an invalid character: "$1".};
}
$self->{_rule} = cook_rule( $rule );
return $self;
}
#-----------------------------------------------------------------------------
sub rule {
my $self = shift;
return $self->{_rule};
}
#-----------------------------------------------------------------------------
sub policy_is_thematic {
my ($self, %args) = @_;
my $policy = $args{-policy}
|| throw_internal 'The -policy argument is required';
ref $policy
|| throw_internal 'The -policy must be an object';
my $rule = $self->{_rule} or return 1;
my %themes = hashify( $policy->get_themes() );
# This bit of magic turns the rule into a perl expression that can be
# eval-ed for truth. Each theme name in the rule is translated to 1 or 0
# if the $policy belongs in that theme. For example:
#
# 'bugs && (pbp || core)' ...could become... '1 && (0 || 1)'
my $as_code = $rule; #Making a copy, so $rule is preserved
$as_code =~ s/ ( [\w\d]+ ) /exists $themes{$1} || 0/gexms;
my $is_thematic = eval $as_code; ## no critic (ProhibitStringyEval)
if ($EVAL_ERROR) {
throw_global_value
option_name => $CONFIG_KEY,
option_value => $rule,
message_suffix => q{contains a syntax error.};
}
return $is_thematic;
}
#-----------------------------------------------------------------------------
sub cook_rule {
my ($raw_rule) = @_;
return if not defined $raw_rule;
#Translate logical operators
$raw_rule =~ s{\b not \b}{!}ixmsg; # "not" -> "!"
$raw_rule =~ s{\b and \b}{&&}ixmsg; # "and" -> "&&"
$raw_rule =~ s{\b or \b}{||}ixmsg; # "or" -> "||"
#Translate algebra operators (for backward compatibility)
$raw_rule =~ s{\A [-] }{!}ixmsg; # "-" -> "!" e.g. difference
$raw_rule =~ s{ [-] }{&& !}ixmsg; # "-" -> "&& !" e.g. difference
$raw_rule =~ s{ [*] }{&&}ixmsg; # "*" -> "&&" e.g. intersection
$raw_rule =~ s{ [+] }{||}ixmsg; # "+" -> "||" e.g. union
my $cooked_rule = lc $raw_rule; #Is now cooked!
return $cooked_rule;
}
1;
__END__
#-----------------------------------------------------------------------------
=pod
=head1 NAME
Perl::Critic::Theme - Construct thematic sets of policies.
=head1 DESCRIPTION
This is a helper class for evaluating theme expressions into sets of
Policy objects. There are no user-serviceable parts here.
=head1 INTERFACE SUPPORT
This is considered to be a non-public class. Its interface is subject
to change without notice.
=head1 METHODS
=over
=item C<< new( -rule => $rule_expression ) >>
Returns a reference to a new Perl::Critic::Theme object. C<-rule> is
a string expression that evaluates to true or false for each Policy..
See L<"THEME RULES"> for more information.
=item C<< policy_is_thematic( -policy => $policy ) >>
Given a reference to a L<Perl::Critic::Policy|Perl::Critic::Policy>
object, this method returns evaluates the rule against the themes that
are associated with the Policy. Returns 1 if the Policy satisfies the
rule, 0 otherwise.
=item C< rule() >
Returns the rule expression that was used to construct this Theme.
The rule may have been translated into a normalized expression. See
L<"THEME RULES"> for more information.
=back
=head2 THEME RULES
A theme rule is a simple boolean expression, where the operands are
the names of any of the themes associated with the
Perl::Critic::Polices.
Theme names can be combined with logical operators to form arbitrarily
complex expressions. Precedence is the same as normal mathematics,
but you can use parentheses to enforce precedence as well. Supported
operators are:
Operator Altertative Example
----------------------------------------------------------------
&& and 'pbp && core'
|| or 'pbp || (bugs && security)'
! not 'pbp && ! (portability || complexity)
See L<Perl::Critic/"CONFIGURATION"> for more information about
customizing the themes for each Policy.
=head1 SUBROUTINES
=over
=item C<cook_rule( $rule )>
Standardize a rule into a almost executable Perl code. The "almost"
comes from the fact that theme names are left as is.
=back
=head1 CONSTANTS
=over
=item C<$RULE_INVALID_CHARACTER_REGEX>
A regular expression that will return the first character in the
matched expression that is not valid in a rule.
=back
=head1 AUTHOR
Jeffrey Ryan Thalhammer <[email protected]>
=head1 COPYRIGHT
Copyright (c) 2006-2011 Imaginative Software Systems
This program is free software; you can redistribute it and/or modify
it under the same terms as Perl itself. The full text of this license
can be found in the LICENSE file included with this module.
=cut
##############################################################################
# Local Variables:
# mode: cperl
# cperl-indent-level: 4
# fill-column: 78
# indent-tabs-mode: nil
# c-indentation-style: bsd
# End:
# ex: set ts=8 sts=4 sw=4 tw=78 ft=perl expandtab shiftround :
| 26.426877 | 78 | 0.555489 |
edccd6023872788d64c2663923b16854364f730f | 3,177 | pm | Perl | lib/Google/Ads/GoogleAds/V8/Services/CampaignExperimentService.pm | googleads/google-ads-perl | 3ee6c09e11330fea1e6a0c9ee9f837e5e36d8177 | [
"Apache-2.0"
] | 19 | 2019-06-21T00:43:57.000Z | 2022-03-29T14:23:01.000Z | lib/Google/Ads/GoogleAds/V8/Services/CampaignExperimentService.pm | googleads/google-ads-perl | 3ee6c09e11330fea1e6a0c9ee9f837e5e36d8177 | [
"Apache-2.0"
] | 16 | 2020-03-04T07:44:53.000Z | 2021-12-15T23:06:23.000Z | lib/Google/Ads/GoogleAds/V8/Services/CampaignExperimentService.pm | googleads/google-ads-perl | 3ee6c09e11330fea1e6a0c9ee9f837e5e36d8177 | [
"Apache-2.0"
] | 9 | 2020-02-28T03:00:48.000Z | 2021-11-10T14:23:02.000Z | # Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V8::Services::CampaignExperimentService;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseService);
sub create {
my $self = shift;
my $request_body = shift;
my $http_method = 'POST';
my $request_path = 'v8/customers/{+customerId}/campaignExperiments:create';
my $response_type = 'Google::Ads::GoogleAds::LongRunning::Operation';
return $self->SUPER::call($http_method, $request_path, $request_body,
$response_type);
}
sub end {
my $self = shift;
my $request_body = shift;
my $http_method = 'POST';
my $request_path = 'v8/{+campaignExperiment}:end';
my $response_type = '';
return $self->SUPER::call($http_method, $request_path, $request_body,
$response_type);
}
sub get {
my $self = shift;
my $request_body = shift;
my $http_method = 'GET';
my $request_path = 'v8/{+resourceName}';
my $response_type =
'Google::Ads::GoogleAds::V8::Resources::CampaignExperiment';
return $self->SUPER::call($http_method, $request_path, $request_body,
$response_type);
}
sub graduate {
my $self = shift;
my $request_body = shift;
my $http_method = 'POST';
my $request_path = 'v8/{+campaignExperiment}:graduate';
my $response_type =
'Google::Ads::GoogleAds::V8::Services::CampaignExperimentService::GraduateCampaignExperimentResponse';
return $self->SUPER::call($http_method, $request_path, $request_body,
$response_type);
}
sub list_async_errors {
my $self = shift;
my $request_body = shift;
my $http_method = 'GET';
my $request_path = 'v8/{+resourceName}:listAsyncErrors';
my $response_type =
'Google::Ads::GoogleAds::V8::Services::CampaignExperimentService::ListCampaignExperimentAsyncErrorsResponse';
return $self->SUPER::call($http_method, $request_path, $request_body,
$response_type);
}
sub mutate {
my $self = shift;
my $request_body = shift;
my $http_method = 'POST';
my $request_path = 'v8/customers/{+customerId}/campaignExperiments:mutate';
my $response_type =
'Google::Ads::GoogleAds::V8::Services::CampaignExperimentService::MutateCampaignExperimentsResponse';
return $self->SUPER::call($http_method, $request_path, $request_body,
$response_type);
}
sub promote {
my $self = shift;
my $request_body = shift;
my $http_method = 'POST';
my $request_path = 'v8/{+campaignExperiment}:promote';
my $response_type = 'Google::Ads::GoogleAds::LongRunning::Operation';
return $self->SUPER::call($http_method, $request_path, $request_body,
$response_type);
}
1;
| 30.84466 | 109 | 0.70192 |
ed9dd5fbac436075f63d71f706909594726dc5b1 | 1,401 | pm | Perl | lib/Google/Ads/GoogleAds/V3/Resources/DynamicSearchAdsSearchTermView.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
] | null | null | null | lib/Google/Ads/GoogleAds/V3/Resources/DynamicSearchAdsSearchTermView.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
] | null | null | null | lib/Google/Ads/GoogleAds/V3/Resources/DynamicSearchAdsSearchTermView.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V3::Resources::DynamicSearchAdsSearchTermView;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);
use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;
sub new {
my ($class, $args) = @_;
my $self = {
hasMatchingKeyword => $args->{hasMatchingKeyword},
hasNegativeKeyword => $args->{hasNegativeKeyword},
hasNegativeUrl => $args->{hasNegativeUrl},
headline => $args->{headline},
landingPage => $args->{landingPage},
pageUrl => $args->{pageUrl},
resourceName => $args->{resourceName},
searchTerm => $args->{searchTerm}};
# Delete the unassigned fields in this object for a more concise JSON payload
remove_unassigned_fields($self, $args);
bless $self, $class;
return $self;
}
1;
| 32.581395 | 79 | 0.695218 |
edb2ffca71b29cbb52b01ab2137917e5fd6cba27 | 3,725 | pm | Perl | network/hirschmann/standard/snmp/mode/components/psu.pm | alenorcy/centreon-plugins | d7603030c24766935ed07e6ebe1082e16d6fdb4a | [
"Apache-2.0"
] | null | null | null | network/hirschmann/standard/snmp/mode/components/psu.pm | alenorcy/centreon-plugins | d7603030c24766935ed07e6ebe1082e16d6fdb4a | [
"Apache-2.0"
] | 2 | 2016-07-28T10:18:20.000Z | 2017-04-11T14:16:48.000Z | network/hirschmann/standard/snmp/mode/components/psu.pm | alenorcy/centreon-plugins | d7603030c24766935ed07e6ebe1082e16d6fdb4a | [
"Apache-2.0"
] | 1 | 2018-03-20T11:05:05.000Z | 2018-03-20T11:05:05.000Z | #
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package network::hirschmann::standard::snmp::mode::components::psu;
use strict;
use warnings;
my %map_psu_status = (
1 => 'ok',
2 => 'failed',
3 => 'notInstalled',
4 => 'unknown',
);
my %map_psu_state = (
1 => 'error', 2 => 'ignore',
);
my %map_psid = (
1 => 9, # hmDevMonSensePS1State
2 => 10, # hmDevMonSensePS2State
3 => 14, # hmDevMonSensePS3State
4 => 15, # hmDevMonSensePS4State
5 => 17, # hmDevMonSensePS5State
6 => 18, # hmDevMonSensePS6State
7 => 19, # hmDevMonSensePS7State
8 => 20, # hmDevMonSensePS8State
);
# In MIB 'hmpriv.mib'
my $mapping = {
hmPSState => { oid => '.1.3.6.1.4.1.248.14.1.2.1.3', map => \%map_psu_status },
};
my $oid_hmDevMonConfigEntry = '.1.3.6.1.4.1.248.14.2.12.3.1';
sub load {
my ($self) = @_;
push @{$self->{request}}, { oid => $mapping->{hmPSState}->{oid} }, { oid => $oid_hmDevMonConfigEntry };
}
sub check {
my ($self) = @_;
$self->{output}->output_add(long_msg => "Checking power supplies");
$self->{components}->{psu} = { name => 'psus', total => 0, skip => 0 };
return if ($self->check_filter(section => 'psu'));
foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$mapping->{hmPSState}->{oid}}})) {
next if ($oid !~ /^$mapping->{hmPSState}->{oid}\.(\d+)\.(\d+)$/);
my $instance = $1 . '.' . $2;
my ($sysid, $psid) = ($1, $2);
my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{$mapping->{hmPSState}->{oid}}, instance => $instance);
if (defined($map_psid{$psid}) &&
defined($self->{results}->{$oid_hmDevMonConfigEntry}->{$oid_hmDevMonConfigEntry . '.' . $map_psid{$psid} . '.' . $sysid})) {
my $state = $map_psu_state{$self->{results}->{$oid_hmDevMonConfigEntry}->{$oid_hmDevMonConfigEntry . '.' . $map_psid{$psid} . '.' . $sysid}};
$result->{hmPSState} = 'ignore' if ($state eq 'ignore');
}
next if ($self->check_filter(section => 'psu', instance => $instance));
next if ($result->{hmPSState} =~ /notInstalled/i &&
$self->absent_problem(section => 'psu', instance => $instance));
$self->{components}->{psu}->{total}++;
$self->{output}->output_add(long_msg => sprintf("Power supply '%s' status is %s [instance: %s].",
$instance, $result->{hmPSState},
$instance
));
my $exit = $self->get_severity(section => 'psu', value => $result->{hmPSState});
if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
$self->{output}->output_add(severity => $exit,
short_msg => sprintf("Power supply '%s' status is %s",
$instance, $result->{hmPSState}));
}
}
}
1;
| 38.802083 | 154 | 0.572886 |
edbdf8e1fe2c8767b07caae4da183fc971304ac7 | 3,215 | pm | Perl | lib/App/Manoc/DataDumper/Script.pm | gmambro/manoc | 1c25734028970b4fda548c9d4aa8cc47eb67bc6d | [
"Artistic-1.0"
] | 3 | 2015-06-21T18:23:17.000Z | 2017-06-11T23:19:29.000Z | lib/App/Manoc/DataDumper/Script.pm | gmambro/manoc | 1c25734028970b4fda548c9d4aa8cc47eb67bc6d | [
"Artistic-1.0"
] | 34 | 2015-06-20T07:27:30.000Z | 2022-01-17T10:28:28.000Z | lib/App/Manoc/DataDumper/Script.pm | gmambro/manoc | 1c25734028970b4fda548c9d4aa8cc47eb67bc6d | [
"Artistic-1.0"
] | 1 | 2019-11-18T08:15:03.000Z | 2019-11-18T08:15:03.000Z | package App::Manoc::DataDumper::Script;
#ABSTRACT: Manoc Netwalker script runner
=head1 DESCRIPTION
This class is responsible for running the manoc dumper scripst. It extends
L<App::Manoc::Script> and can operate in two modes: load or save.
=cut
use Moose;
##VERSION
extends 'App::Manoc::Script';
use App::Manoc::Support;
use App::Manoc::DataDumper;
use App::Manoc::DataDumper::Data;
use App::Manoc::Logger;
use File::Temp;
use File::Spec;
use Archive::Tar;
use Try::Tiny;
use YAML::Syck;
has 'enable_fk' => (
is => 'rw',
isa => 'Bool',
required => 0,
default => 0
);
has 'overwrite' => (
is => 'rw',
isa => 'Bool',
required => 0,
default => 0
);
has 'force' => (
is => 'rw',
isa => 'Bool',
required => 0,
default => 0
);
has 'skip_notempty' => (
is => 'rw',
isa => 'Bool',
required => 0,
default => 0
);
has 'load' => (
is => 'rw',
isa => 'Str',
);
has 'save' => (
is => 'rw',
isa => 'Str',
);
has 'include' => (
is => 'rw',
isa => 'ArrayRef',
default => sub { [] },
);
has 'exclude' => (
is => 'rw',
isa => 'ArrayRef',
default => sub { [] },
);
=method run_save
Implements the save command to dump database to a datadumper file.
=cut
sub run_save {
my ($self) = @_;
$self->log->info("Beginning dump of database");
my $datadumper = App::Manoc::DataDumper->new(
{
filename => $self->save,
schema => $self->schema,
log => $self->log,
include => $self->include,
exclude => $self->exclude,
config => $self->config,
}
);
$datadumper->save;
}
=method run_load
Implements the load command to restore database from a datadumper file.
=cut
sub run_load {
my $self = shift;
$self->log->info('Beginning database restore...');
my $datadumper = App::Manoc::DataDumper->new(
{
filename => $self->load,
schema => $self->schema,
log => $self->log,
include => $self->include,
exclude => $self->exclude,
config => $self->config,
skip_notempty => $self->skip_notempty,
enable_fk => $self->enable_fk,
overwrite => $self->overwrite,
force => $self->force
}
);
$datadumper->load();
}
=method run
The script entry point.
=cut
sub run {
my $self = shift;
if ( @{ $self->include } == 1 ) {
$self->include( [ split /\s*,\s*/, $self->include->[0] ] );
}
if ( @{ $self->exclude } == 1 ) {
$self->exclude( [ split /\s*,\s*/, $self->exclude->[0] ] );
}
$self->load and return $self->run_load( $ARGV[1] );
$self->save and return $self->run_save( $ARGV[1] );
print STDERR "You must specify --load or --save\n";
print STDERR $self->usage;
exit 1;
}
no Moose;
__PACKAGE__->meta->make_immutable( inline_constructor => 0 );
1;
# Local Variables:
# mode: cperl
# indent-tabs-mode: nil
# cperl-indent-level: 4
# cperl-indent-parens-as-block: t
# End:
| 19.136905 | 74 | 0.517885 |
ed846a16d9167c43975a1e80ee04bdaf96fe3b29 | 21,207 | t | Perl | t/serialization/02-types.t | mryan/perl6-nqp | 687e2f12c866ee1498229ac1ac234ff56b7d02b8 | [
"MIT"
] | null | null | null | t/serialization/02-types.t | mryan/perl6-nqp | 687e2f12c866ee1498229ac1ac234ff56b7d02b8 | [
"MIT"
] | null | null | null | t/serialization/02-types.t | mryan/perl6-nqp | 687e2f12c866ee1498229ac1ac234ff56b7d02b8 | [
"MIT"
] | null | null | null | #! nqp
use nqpmo;
plan(58);
sub add_to_sc($sc, $idx, $obj) {
nqp::scsetobj($sc, $idx, $obj);
nqp::setobjsc($obj, $sc);
}
# Serializing a knowhow with no attributes and no methods; P6int REPR
# (very simple REPR).
{
my $sc := nqp::createsc('TEST_SC_1_IN');
my $sh := nqp::list_s();
my $type := nqp::knowhow().new_type(:name('Badger'), :repr('P6int'));
$type.HOW.compose($type);
add_to_sc($sc, 0, $type);
add_to_sc($sc, 1, nqp::box_i(42, $type));
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_1_OUT');
nqp::deserialize($serialized, $dsc, $sh, nqp::list(), nqp::null());
ok(nqp::scobjcount($dsc) >= 2, 'deserialized SC has at least the knowhow type and its instance');
ok(!nqp::isconcrete(nqp::scgetobj($dsc, 0)), 'type object deserialized and is not concrete');
ok(nqp::isconcrete(nqp::scgetobj($dsc, 1)), 'instance deserialized and is concrete');
ok(nqp::unbox_i(nqp::scgetobj($dsc, 1)) == 42, 'serialized P6int instance has correct value');
ok(nqp::istype(nqp::scgetobj($dsc, 1), nqp::scgetobj($dsc, 0)), 'type checking is OK after deserialization');
ok(nqp::scgetobj($dsc, 0).HOW.name(nqp::scgetobj($dsc, 0)) eq 'Badger', 'meta-object deserialized along with name');
}
# Serializing a type using P6opaque, which declares an attribute, along
# with an instance of it.
{
my $sc := nqp::createsc('TEST_SC_2_IN');
my $sh := nqp::list_s();
my $type := nqp::knowhow().new_type(:name('Dugong'), :repr('P6opaque'));
$type.HOW.add_attribute($type, nqp::knowhowattr().new(name => '$!home'));
$type.HOW.compose($type);
add_to_sc($sc, 0, $type);
my $instance := nqp::create($type);
nqp::bindattr($instance, $type, '$!home', 'Sea');
add_to_sc($sc, 1, $instance);
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_2_OUT');
nqp::deserialize($serialized, $dsc, $sh, nqp::list(), nqp::null());
ok(nqp::scobjcount($dsc) >= 2, 'deserialized SC has at least the knowhow type and its instance');
ok(!nqp::isconcrete(nqp::scgetobj($dsc, 0)), 'type object deserialized and is not concrete');
ok(nqp::isconcrete(nqp::scgetobj($dsc, 1)), 'instance deserialized and is concrete');
ok(nqp::istype(nqp::scgetobj($dsc, 1), nqp::scgetobj($dsc, 0)), 'type checking is OK after deserialization');
ok(nqp::scgetobj($dsc, 0).HOW.name(nqp::scgetobj($dsc, 0)) eq 'Dugong', 'meta-object deserialized along with name');
ok(nqp::getattr(nqp::scgetobj($dsc, 1), nqp::scgetobj($dsc, 0), '$!home') eq 'Sea',
'attribute declared in P6opaque-based type is OK');
}
# Serializing a P6opaque type with natively typed attributes, this time using NQPClassHOW.
{
my $sc := nqp::createsc('TEST_SC_3_IN');
my $sh := nqp::list_s();
my $type := NQPClassHOW.new_type(:name('Badger'), :repr('P6opaque'));
$type.HOW.add_attribute($type, NQPAttribute.new(name => '$!eats', type => str));
$type.HOW.add_attribute($type, NQPAttribute.new(name => '$!age', type => int));
$type.HOW.add_attribute($type, NQPAttribute.new(name => '$!weight', type => num));
$type.HOW.add_parent($type, NQPMu);
$type.HOW.compose($type);
add_to_sc($sc, 0, $type);
my $instance := nqp::create($type);
nqp::bindattr_s($instance, $type, '$!eats', 'mushrooms');
nqp::bindattr_i($instance, $type, '$!age', 5);
nqp::bindattr_n($instance, $type, '$!weight', 2.3);
add_to_sc($sc, 1, $instance);
my $defaults := nqp::create($type);
add_to_sc($sc, 2, $defaults);
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_3_OUT');
nqp::deserialize($serialized, $dsc, $sh, nqp::list(), nqp::null());
ok(nqp::scobjcount($dsc) >= 2, 'deserialized SC has at least the knowhow type and its instance');
ok(!nqp::isconcrete(nqp::scgetobj($dsc, 0)), 'type object deserialized and is not concrete');
ok(nqp::isconcrete(nqp::scgetobj($dsc, 1)), 'instance deserialized and is concrete');
ok(nqp::istype(nqp::scgetobj($dsc, 1), nqp::scgetobj($dsc, 0)), 'type checking is OK after deserialization');
ok(nqp::scgetobj($dsc, 0).HOW.name(nqp::scgetobj($dsc, 0)) eq 'Badger', 'meta-object deserialized along with name');
ok(nqp::getattr_s(nqp::scgetobj($dsc, 1), nqp::scgetobj($dsc, 0), '$!eats') eq 'mushrooms',
'str attribute declared in P6opaque-based type is OK');
ok(nqp::getattr_i(nqp::scgetobj($dsc, 1), nqp::scgetobj($dsc, 0), '$!age') == 5,
'int attribute declared in P6opaque-based type is OK');
ok(nqp::getattr_n(nqp::scgetobj($dsc, 1), nqp::scgetobj($dsc, 0), '$!weight') == 2.3,
'num attribute declared in P6opaque-based type is OK');
ok(nqp::isnull_s(nqp::getattr_s(nqp::scgetobj($dsc, 2), nqp::scgetobj($dsc, 0), '$!eats')),
'default str value is OK');
ok(nqp::getattr_i(nqp::scgetobj($dsc, 2), nqp::scgetobj($dsc, 0), '$!age') == 0,
'default int value is OK');
ok(nqp::getattr_n(nqp::scgetobj($dsc, 2), nqp::scgetobj($dsc, 0), '$!weight') == 0,
'default num value is OK');
my $other_instance := nqp::create(nqp::scgetobj($dsc, 0));
ok(nqp::isconcrete($other_instance), 'can make new instance of deserialized type');
nqp::bindattr_s($other_instance, nqp::scgetobj($dsc, 0), '$!eats', 'snakes');
nqp::bindattr_i($other_instance, nqp::scgetobj($dsc, 0), '$!age', 10);
nqp::bindattr_n($other_instance, nqp::scgetobj($dsc, 0), '$!weight', 3.4);
ok(nqp::getattr_s($other_instance, nqp::scgetobj($dsc, 0), '$!eats') eq 'snakes',
'str attribute in new instance OK');
ok(nqp::getattr_i($other_instance, nqp::scgetobj($dsc, 0), '$!age') == 10,
'int attribute in new instance OK');
ok(nqp::getattr_n($other_instance, nqp::scgetobj($dsc, 0), '$!weight') == 3.4,
'num attribute in new instance OK');
}
# Serializing a type with methods (P6opaque REPR, NQPClassHOW)
{
my $sc := nqp::createsc('TEST_SC_4_IN');
my $sh := nqp::list_s();
my $m1 := method () { "awful" };
my $m2 := method () { "Hi, I'm " ~ nqp::getattr(self, self.WHAT, '$!name') };
nqp::scsetcode($sc, 0, $m1);
nqp::scsetcode($sc, 1, $m2);
nqp::markcodestatic($m1);
nqp::markcodestatic($m2);
my $type := NQPClassHOW.new_type(:name('Llama'), :repr('P6opaque'));
$type.HOW.add_attribute($type, NQPAttribute.new(name => '$!name'));
$type.HOW.add_method($type, 'smell', $m1);
$type.HOW.add_method($type, 'intro', $m2);
$type.HOW.add_parent($type, NQPMu);
$type.HOW.compose($type);
add_to_sc($sc, 0, $type);
my $instance := nqp::create($type);
nqp::bindattr($instance, $type, '$!name', 'Bob');
add_to_sc($sc, 1, $instance);
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_4_OUT');
my $cr := nqp::list($m1, $m2);
nqp::deserialize($serialized, $dsc, $sh, $cr, nqp::null());
ok(nqp::scobjcount($dsc) >= 2, 'deserialized SC has at least the knowhow type and its instance');
ok(!nqp::isconcrete(nqp::scgetobj($dsc, 0)), 'type object deserialized and is not concrete');
ok(nqp::isconcrete(nqp::scgetobj($dsc, 1)), 'instance deserialized and is concrete');
ok(nqp::istype(nqp::scgetobj($dsc, 1), nqp::scgetobj($dsc, 0)), 'type checking is OK after deserialization');
ok(nqp::scgetobj($dsc, 0).smell eq 'awful', 'method call on deserialized type object ok');
ok(nqp::scgetobj($dsc, 1).smell eq 'awful', 'method call on deserialized instance object ok');
ok(nqp::scgetobj($dsc, 1).intro eq "Hi, I'm Bob", 'method call accessing instance attributes ok');
}
# Serializing a type with boolification (P6opaque REPR, NQPClassHOW)
{
my $sc := nqp::createsc('TEST_SC_5_IN');
my $sh := nqp::list_s();
my $half-true := method () {
nqp::bindattr(self, self.WHAT, '$!bool', !nqp::getattr(self, self.WHAT, '$!bool'));
nqp::getattr(self, self.WHAT, '$!bool')
};
nqp::scsetcode($sc, 0, $half-true);
nqp::markcodestatic($half-true);
my $type := NQPClassHOW.new_type(:name('Llama'), :repr('P6opaque'));
$type.HOW.add_attribute($type, NQPAttribute.new(name => '$!bool'));
$type.HOW.add_method($type, 'half-true', $half-true);
$type.HOW.add_parent($type, NQPMu);
$type.HOW.compose($type);
nqp::setboolspec($type, 0, $half-true);
my $instance := nqp::create($type);
nqp::bindattr($instance, $type, '$!bool', 1);
add_to_sc($sc, 0, $instance);
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_5_OUT');
my $cr := nqp::list($half-true);
nqp::deserialize($serialized, $dsc, $sh, $cr, nqp::null());
ok(nqp::scobjcount($dsc) >= 2, 'deserialized SC has at least the knowhow type and its instance');
my $obj := nqp::scgetobj($dsc, 0);
ok(nqp::istrue($obj) == 0, "checking our custom boolifier is called... 1/3");
ok(nqp::istrue($obj) == 1, "checking our custom boolifier is called... 2/3");
ok(nqp::istrue($obj) == 0, "checking our custom boolifier is called... 3/3");
}
# Serializing a type with a invocation spec (P6opaque REPR, NQPClassHOW)
{
my $sc := nqp::createsc('TEST_SC_6_IN');
my $sh := nqp::list_s();
my $invoke := sub ($invoke) {
700
};
nqp::scsetcode($sc, 0, $invoke);
nqp::markcodestatic($invoke);
my $code := sub () {
800
};
nqp::scsetcode($sc, 1, $code);
nqp::markcodestatic($code);
my $type := NQPClassHOW.new_type(:name('type1'), :repr('P6opaque'));
$type.HOW.add_parent($type, NQPMu);
$type.HOW.compose($type);
nqp::setinvokespec($type, nqp::null(), nqp::null_s(), $invoke);
my $type2 := NQPClassHOW.new_type(:name('type2'), :repr('P6opaque'));
$type2.HOW.add_parent($type2, NQPMu);
$type2.HOW.add_attribute($type, NQPAttribute.new(name => '$!code'));
$type2.HOW.compose($type2);
nqp::setinvokespec($type2, $type2, '$!code', nqp::null());
my $instance := nqp::create($type);
add_to_sc($sc, 0, $instance);
my $instance2 := nqp::create($type2);
nqp::bindattr($instance2, $type2, '$!code', $code);
add_to_sc($sc, 1, $instance2);
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_6_OUT');
my $cr := nqp::list($invoke, $code);
nqp::deserialize($serialized, $dsc, $sh, $cr, nqp::null());
my $obj := nqp::scgetobj($dsc, 0);
ok($obj() == 700, "invokespec with invokeHandler survived serialization");
my $obj2 := nqp::scgetobj($dsc, 1);
ok($obj2() == 800, "invokespec with attribute survived serialization");
}
# Serializing a type with box_target attribute
{
my $sc := nqp::createsc('TEST_SC_7_IN');
my $sh := nqp::list_s();
my $type := NQPClassHOW.new_type(:name('boxing_test'), :repr('P6opaque'));
$type.HOW.add_attribute($type, NQPAttribute.new(
:name('$!value'), :type(int), :box_target(1)
));
$type.HOW.add_parent($type, NQPMu);
$type.HOW.compose($type);
add_to_sc($sc, 0, $type);
my $instance := nqp::box_i(4, $type);
add_to_sc($sc, 1, $instance);
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_7_OUT');
my $cr := nqp::list();
nqp::deserialize($serialized, $dsc, $sh, $cr, nqp::null());
ok(nqp::unbox_i(nqp::box_i(7, nqp::scgetobj($dsc, 0))) == 7, "can use deserialized type for boxing");
ok(nqp::unbox_i(nqp::scgetobj($dsc, 1)) == 4, "can unbox deserialized object - int");
}
# Serializing a P6bigint repr
{
my $sc := nqp::createsc('TEST_SC_8_IN');
my $sh := nqp::list_s();
my $knowhow := nqp::knowhow();
my $type := $knowhow.new_type(:name('TestBigInt'), :repr('P6bigint'));
$type.HOW.compose($type);
add_to_sc($sc, 0, $type);
my $instance := nqp::box_i(147, $type);
add_to_sc($sc, 1, $instance);
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_8_OUT');
my $cr := nqp::list();
nqp::deserialize($serialized, $dsc, $sh, $cr, nqp::null());
ok(nqp::unbox_i(nqp::scgetobj($dsc, 1)) == 147, "can unbox serialized bigint");
}
# Serializing a type with box_target attribute and P6bigint type
{
my $bi_type := NQPClassHOW.new_type(:name('TestBigInt'), :repr('P6bigint'));
$bi_type.HOW.add_parent($bi_type, NQPMu);
$bi_type.HOW.compose($bi_type);
my $sc := nqp::createsc('TEST_SC_9_IN');
my $sh := nqp::list_s();
my $type := NQPClassHOW.new_type(:name('boxing_test'), :repr('P6opaque'));
$type.HOW.add_attribute($type, NQPAttribute.new(
:name('$!value'), :type($bi_type), :box_target(1)
));
$type.HOW.add_parent($type, NQPMu);
$type.HOW.compose($type);
add_to_sc($sc, 0, $bi_type);
my $instance := nqp::box_i(4, $bi_type);
add_to_sc($sc, 1, $instance);
my $instance2 := nqp::box_i(5, $bi_type);
add_to_sc($sc, 2, $instance2);
add_to_sc($sc, 3, $type);
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_9_OUT');
my $cr := nqp::list();
nqp::deserialize($serialized, $dsc, $sh, $cr, nqp::null());
ok(nqp::unbox_i(nqp::box_i(7, nqp::scgetobj($dsc, 0))) == 7, "can use deserialized type for boxing - got " ~ nqp::scgetobj($dsc,0));
ok(nqp::unbox_i(nqp::box_i(8, nqp::scgetobj($dsc, 3))) == 8, "can use deserialized type for boxing");
ok(nqp::unbox_i(nqp::scgetobj($dsc, 1)) == 4, "can unbox bigint obj");
ok(nqp::unbox_i(nqp::scgetobj($dsc, 2)) == 5, "can unbox autoboxed bigint obj");
}
# Serializing a parameterized type
{
my $sc := nqp::createsc('TEST_SC_10_IN');
my $sh := nqp::list_s();
my $cr := nqp::list();
my $count := 0;
class SimpleCoerceHOW {
method new_type() {
my $type := nqp::newtype(self.new(), 'Uninstantiable');
my $parameterizer := -> $type, $params {
# Re-use same HOW.
$count := $count + 1;
nqp::newtype($type.HOW, 'Uninstantiable');
}
$cr[0] := $parameterizer;
nqp::scsetcode($sc, 0, $parameterizer);
nqp::markcodestatic($parameterizer);
nqp::setparameterizer($type, $parameterizer);
$type
}
method parameterize($type, $params) {
nqp::parameterizetype($type, $params);
}
}
my $with_param := SimpleCoerceHOW.new_type();
my $hi := $with_param.HOW.parameterize($with_param, ["Hi"]);
class Foo {}
class Bar {}
my $with_foo := $with_param.HOW.parameterize($with_param, [Foo]);
add_to_sc($sc, 0, $with_param);
add_to_sc($sc, 1, $hi);
add_to_sc($sc, 2, $with_foo);
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_10_OUT');
nqp::deserialize($serialized, $dsc, $sh, $cr, nqp::null());
my $type := nqp::scgetobj($dsc, 0);
my $hello := $type.HOW.parameterize($type, ["Hello"]);
ok(nqp::typeparameterat($hello, 0) eq "Hello", "We can serialize a parameteric type");
my $dsc_hi := nqp::scgetobj($dsc, 1);
ok(nqp::typeparameterat($dsc_hi, 0) eq "Hi", "We can serialize a parameterized type");
my $dsc_with_foo := nqp::scgetobj($dsc, 2);
ok(nqp::eqaddr(nqp::typeparameterat($dsc_with_foo, 0), Foo), "Type parameterized with type object is serialized correctly");
my $new_with_foo := $type.HOW.parameterize($type, [Foo]);
my $new_with_bar := $type.HOW.parameterize($type, [Bar]);
ok(nqp::eqaddr(nqp::typeparameterat($new_with_foo, 0), Foo), "We can parameterize with a type object using a deserialized parameterizer ");
skip('Fails on JVM', 1);
#ok(nqp::eqaddr($new_with_foo, $dsc_with_foo), "We get stuff from the type cache");
ok(!nqp::eqaddr($new_with_bar, $dsc_with_foo), "Parameterizing with a type object that's not in cache");
}
# Serializing a type with HLL owner
{
my $type := NQPClassHOW.new_type(:name('hll test'), :repr('P6opaque'));
$type.HOW.add_parent($type, NQPMu);
$type.HOW.compose($type);
nqp::settypehll($type, "foo");
class Baz {
}
nqp::sethllconfig('foo', nqp::hash(
'foreign_transform_array', -> $array {
nqp::list('fooifed');
}
));
nqp::sethllconfig('baz', nqp::hash(
'foreign_transform_array', -> $array {
Baz;
},
));
my $sc := nqp::createsc('TEST_SC_11_IN');
my $sh := nqp::list_s();
my $cr := nqp::list();
add_to_sc($sc, 0, $type);
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_11_OUT');
nqp::deserialize($serialized, $dsc, $sh, $cr, nqp::null());
my $obj := nqp::scgetobj($dsc, 0).new;
nqp::settypehllrole(nqp::scgetobj($dsc, 0), 4);
ok(nqp::eqaddr(nqp::hllizefor($obj, "foo"), $obj), "correct hll prevents convertion");
ok(nqp::eqaddr(nqp::hllizefor($obj, "baz"), Baz), "in this case it's converted anyway");
}
# Serializing a type with HLL role
{
my $type := NQPClassHOW.new_type(:name('hll test'), :repr('P6opaque'));
$type.HOW.add_parent($type, NQPMu);
$type.HOW.compose($type);
nqp::settypehllrole($type, 4);
nqp::sethllconfig('somelang', nqp::hash(
'foreign_transform_array', -> $array {
nqp::list('fooifed');
}
));
my $sc := nqp::createsc('TEST_SC_12_IN');
my $sh := nqp::list_s();
my $cr := nqp::list();
add_to_sc($sc, 0, $type);
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_12_OUT');
nqp::deserialize($serialized, $dsc, $sh, $cr, nqp::null());
my $obj := nqp::scgetobj($dsc, 0).new;
my $hllized := nqp::hllizefor($obj, "somelang");
ok(nqp::atpos($hllized, 0) eq "fooifed", "hll role is preserved correctly");
}
# Setting the type check mode nqp::const::TYPE_CHECK_NEEDS_ACCEPTS is preserved by serialization
{
my class AcceptingType {
has int $!accepts_type_called;
has $!accepts;
method accepts_type_called() {
$!accepts_type_called;
}
method accepts_type($type, $obj) {
$!accepts_type_called := $!accepts_type_called + 1;
$!accepts;
}
method new_type() {
nqp::newtype(self, 'Uninstantiable');
}
}
my class Bar { }
my $type := AcceptingType.new(accepts => 1).new_type;
nqp::composetype($type, nqp::hash());
my $sc := nqp::createsc('TEST_SC_13_IN');
my $sh := nqp::list_s();
my $cr := nqp::list();
add_to_sc($sc, 0, $type);
nqp::settypecheckmode($type, nqp::const::TYPE_CHECK_NEEDS_ACCEPTS);
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_13_OUT');
nqp::deserialize($serialized, $dsc, $sh, $cr, nqp::null());
my $dsc_type := nqp::scgetobj($dsc, 0);
ok(nqp::istype(Bar, $dsc_type), 'nqp::const::TYPE_CHECK_NEEDS_ACCEPTS is preserved after serialization');
is($dsc_type.HOW.accepts_type_called, 1, 'accepts_type is called when needed');
}
{
my $sc := nqp::createsc('TEST_SC_14_IN');
my $sh := nqp::list_s();
my $parameterizer := -> $type, $params {
nqp::newtype($type.HOW, 'Uninstantiable');
};
nqp::scsetcode($sc, 0, $parameterizer);
nqp::markcodestatic($parameterizer);
my $cr := nqp::list($parameterizer);
my $parametric_type := NQPClassHOW.new_type(:name('Parametric'), :repr('P6opaque'));
$parametric_type.HOW.add_parent($parametric_type, NQPMu);
nqp::setparameterizer($parametric_type, $parameterizer);
$parametric_type.HOW.compose($parametric_type);
add_to_sc($sc, 0, $parametric_type);
my $param_type := NQPClassHOW.new_type(:name('ParamType'), :repr('P6opaque'));
$param_type.HOW.add_parent($param_type, NQPMu);
$param_type.HOW.compose($param_type);
add_to_sc($sc, 1, $param_type);
my $serialized := nqp::serialize($sc, $sh);
my $dsc := nqp::createsc('TEST_SC_14_OUT');
nqp::deserialize($serialized, $dsc, $sh, $cr, nqp::null());
my $dsc_parametric_type := nqp::scgetobj($dsc, 0);
my $dsc_param_type := nqp::scgetobj($dsc, 1);
my $parameterized1 := nqp::parameterizetype($dsc_parametric_type, [$dsc_param_type]);
my $sc2 := nqp::createsc('TEST_SC_15_IN');
my $sh2 := nqp::list_s();
add_to_sc($sc2, 0, $parameterized1);
my $serialized2 := nqp::serialize($sc2, $sh2);
my $cr2 := nqp::list();
my $dsc2 := nqp::createsc('TEST_SC_15_OUT');
nqp::deserialize($serialized2, $dsc2, $sh2, $cr2, nqp::null());
my $dsc3 := nqp::createsc('TEST_SC_16_OUT');
nqp::deserialize($serialized2, $dsc3, $sh2, $cr2, nqp::null());
my $dsc_parameterized_a := nqp::scgetobj($dsc2, 0);
my $dsc_parameterized_b := nqp::scgetobj($dsc3, 0);
}
| 35.404007 | 143 | 0.59801 |
ed916b5b6828b5aac8178e490840609574b9f1c4 | 4,652 | pm | Perl | Benchmarks/Recomputation/specOMP_install/bin/lib/PDF/API2/Basic/TTF/Glyf.pm | sqsq87/NVC | 1ed478788978e3e85c219313cd55564d4037e242 | [
"MIT"
] | null | null | null | Benchmarks/Recomputation/specOMP_install/bin/lib/PDF/API2/Basic/TTF/Glyf.pm | sqsq87/NVC | 1ed478788978e3e85c219313cd55564d4037e242 | [
"MIT"
] | null | null | null | Benchmarks/Recomputation/specOMP_install/bin/lib/PDF/API2/Basic/TTF/Glyf.pm | sqsq87/NVC | 1ed478788978e3e85c219313cd55564d4037e242 | [
"MIT"
] | null | null | null | #=======================================================================
# ____ ____ _____ _ ____ ___ ____
# | _ \| _ \| ___| _ _ / \ | _ \_ _| |___ \
# | |_) | | | | |_ (_) (_) / _ \ | |_) | | __) |
# | __/| |_| | _| _ _ / ___ \| __/| | / __/
# |_| |____/|_| (_) (_) /_/ \_\_| |___| |_____|
#
# A Perl Module Chain to faciliate the Creation and Modification
# of High-Quality "Portable Document Format (PDF)" Files.
#
#=======================================================================
#
# THIS IS A REUSED PERL MODULE, FOR PROPER LICENCING TERMS SEE BELOW:
#
#
# Copyright Martin Hosken <[email protected]>
#
# No warranty or expression of effectiveness, least of all regarding
# anyone's safety, is implied in this software or documentation.
#
# This specific module is licensed under the Perl Artistic License.
#
#
# $Id: Glyf.pm,v 2.0 2005/11/16 02:16:00 areibens Exp $
#
#=======================================================================
package PDF::API2::Basic::TTF::Glyf;
=head1 NAME
PDF::API2::Basic::TTF::Glyf - The Glyf data table
=head1 DESCRIPTION
This is a stub table. The real data is held in the loca table. If you want to get a glyf
look it up in the loca table as C<$f->{'loca'}{'glyphs'}[$num]>. It won't be here!
The difference between reading this table as opposed to the loca table is that
reading this table will cause updated glyphs to be written out rather than just
copying the glyph information from the input file. This causes font writing to be
slower. So read the glyf as opposed to the loca table if you want to change glyf
data. Read the loca table only if you are just wanting to read the glyf information.
This class is used when writing the glyphs though.
=head1 METHODS
=cut
use strict;
use vars qw(@ISA);
@ISA = qw(PDF::API2::Basic::TTF::Table);
=head2 $t->read
Reads the C<loca> table instead!
=cut
sub read
{
my ($self) = @_;
$self->{' PARENT'}{'loca'}->read;
$self->{' read'} = 1;
$self;
}
=head2 $t->out($fh)
Writes out all the glyphs in the parent's location table, calculating a new
output location for each one.
=cut
sub out
{
my ($self, $fh) = @_;
my ($i, $loca, $offset, $numGlyphs);
return $self->SUPER::out($fh) unless $self->{' read'};
$loca = $self->{' PARENT'}{'loca'}{'glyphs'};
$numGlyphs = $self->{' PARENT'}{'maxp'}{'numGlyphs'};
$offset = 0;
for ($i = 0; $i < $numGlyphs; $i++)
{
next unless defined $loca->[$i];
$loca->[$i]->update;
$loca->[$i]{' OUTLOC'} = $offset;
$loca->[$i]->out($fh);
$offset += $loca->[$i]{' OUTLEN'};
}
$self->{' PARENT'}{'head'}{'indexToLocFormat'} = ($offset >= 0x20000);
$self;
}
=head2 $t->out_xml($context, $depth)
Outputs all the glyphs in the glyph table just where they are supposed to be output!
=cut
sub out_xml
{
my ($self, $context, $depth) = @_;
my ($fh) = $context->{'fh'};
my ($loca, $i, $numGlyphs);
$loca = $self->{' PARENT'}{'loca'}{'glyphs'};
$numGlyphs = $self->{' PARENT'}{'maxp'}{'numGlyphs'};
for ($i = 0; $i < $numGlyphs; $i++)
{
$context->{'gid'} = $i;
$loca->[$i]->out_xml($context, $depth) if (defined $loca->[$i]);
}
$self;
}
=head2 $t->XML_start($context, $tag, %attrs)
Pass control to glyphs as they occur
=cut
sub XML_start
{
my ($self) = shift;
my ($context, $tag, %attrs) = @_;
if ($tag eq 'glyph')
{
$context->{'tree'}[-1] = PDF::API2::Basic::TTF::Glyph->new(read => 2, PARENT => $self->{' PARENT'});
$context->{'receiver'} = $context->{'tree'}[-1];
}
}
=head2 $t->XML_end($context, $tag, %attrs)
Collect up glyphs and put them into the loca table
=cut
sub XML_end
{
my ($self) = shift;
my ($context, $tag, %attrs) = @_;
if ($tag eq 'glyph')
{
unless (defined $context->{'glyphs'})
{
if (defined $self->{' PARENT'}{'loca'})
{ $context->{'glyphs'} = $self->{' PARENT'}{'loca'}{'glyphs'}; }
else
{ $context->{'glyphs'} = []; }
}
$context->{'glyphs'}[$attrs{'gid'}] = $context->{'tree'}[-1];
return $context;
} else
{ return $self->SUPER::XML_end(@_); }
}
1;
=head1 BUGS
None known
=head1 AUTHOR
Martin Hosken [email protected]. See L<PDF::API2::Basic::TTF::Font> for copyright and
licensing.
=cut
| 25.145946 | 109 | 0.524291 |
edd6d6a10ce2edf6cdc9f9d181e97664c3135094 | 14,819 | pm | Perl | auto-lib/Paws/MQ.pm | shogo82148/aws-sdk-perl | a87555a9d30dd1415235ebacd2715b2f7e5163c7 | [
"Apache-2.0"
] | null | null | null | auto-lib/Paws/MQ.pm | shogo82148/aws-sdk-perl | a87555a9d30dd1415235ebacd2715b2f7e5163c7 | [
"Apache-2.0"
] | null | null | null | auto-lib/Paws/MQ.pm | shogo82148/aws-sdk-perl | a87555a9d30dd1415235ebacd2715b2f7e5163c7 | [
"Apache-2.0"
] | null | null | null | package Paws::MQ;
use Moose;
sub service { 'mq' }
sub signing_name { 'mq' }
sub version { '2017-11-27' }
sub flattened_arrays { 0 }
has max_attempts => (is => 'ro', isa => 'Int', default => 5);
has retry => (is => 'ro', isa => 'HashRef', default => sub {
{ base => 'rand', type => 'exponential', growth_factor => 2 }
});
has retriables => (is => 'ro', isa => 'ArrayRef', default => sub { [
] });
with 'Paws::API::Caller', 'Paws::API::EndpointResolver', 'Paws::Net::V4Signature', 'Paws::Net::RestJsonCaller';
sub CreateBroker {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::CreateBroker', @_);
return $self->caller->do_call($self, $call_object);
}
sub CreateConfiguration {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::CreateConfiguration', @_);
return $self->caller->do_call($self, $call_object);
}
sub CreateTags {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::CreateTags', @_);
return $self->caller->do_call($self, $call_object);
}
sub CreateUser {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::CreateUser', @_);
return $self->caller->do_call($self, $call_object);
}
sub DeleteBroker {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::DeleteBroker', @_);
return $self->caller->do_call($self, $call_object);
}
sub DeleteTags {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::DeleteTags', @_);
return $self->caller->do_call($self, $call_object);
}
sub DeleteUser {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::DeleteUser', @_);
return $self->caller->do_call($self, $call_object);
}
sub DescribeBroker {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::DescribeBroker', @_);
return $self->caller->do_call($self, $call_object);
}
sub DescribeBrokerEngineTypes {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::DescribeBrokerEngineTypes', @_);
return $self->caller->do_call($self, $call_object);
}
sub DescribeBrokerInstanceOptions {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::DescribeBrokerInstanceOptions', @_);
return $self->caller->do_call($self, $call_object);
}
sub DescribeConfiguration {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::DescribeConfiguration', @_);
return $self->caller->do_call($self, $call_object);
}
sub DescribeConfigurationRevision {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::DescribeConfigurationRevision', @_);
return $self->caller->do_call($self, $call_object);
}
sub DescribeUser {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::DescribeUser', @_);
return $self->caller->do_call($self, $call_object);
}
sub ListBrokers {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::ListBrokers', @_);
return $self->caller->do_call($self, $call_object);
}
sub ListConfigurationRevisions {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::ListConfigurationRevisions', @_);
return $self->caller->do_call($self, $call_object);
}
sub ListConfigurations {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::ListConfigurations', @_);
return $self->caller->do_call($self, $call_object);
}
sub ListTags {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::ListTags', @_);
return $self->caller->do_call($self, $call_object);
}
sub ListUsers {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::ListUsers', @_);
return $self->caller->do_call($self, $call_object);
}
sub RebootBroker {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::RebootBroker', @_);
return $self->caller->do_call($self, $call_object);
}
sub UpdateBroker {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::UpdateBroker', @_);
return $self->caller->do_call($self, $call_object);
}
sub UpdateConfiguration {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::UpdateConfiguration', @_);
return $self->caller->do_call($self, $call_object);
}
sub UpdateUser {
my $self = shift;
my $call_object = $self->new_with_coercions('Paws::MQ::UpdateUser', @_);
return $self->caller->do_call($self, $call_object);
}
sub ListAllBrokers {
my $self = shift;
my $callback = shift @_ if (ref($_[0]) eq 'CODE');
my $result = $self->ListBrokers(@_);
my $next_result = $result;
if (not defined $callback) {
while ($next_result->NextToken) {
$next_result = $self->ListBrokers(@_, NextToken => $next_result->NextToken);
push @{ $result->BrokerSummaries }, @{ $next_result->BrokerSummaries };
}
return $result;
} else {
while ($result->NextToken) {
$callback->($_ => 'BrokerSummaries') foreach (@{ $result->BrokerSummaries });
$result = $self->ListBrokers(@_, NextToken => $result->NextToken);
}
$callback->($_ => 'BrokerSummaries') foreach (@{ $result->BrokerSummaries });
}
return undef
}
sub operations { qw/CreateBroker CreateConfiguration CreateTags CreateUser DeleteBroker DeleteTags DeleteUser DescribeBroker DescribeBrokerEngineTypes DescribeBrokerInstanceOptions DescribeConfiguration DescribeConfigurationRevision DescribeUser ListBrokers ListConfigurationRevisions ListConfigurations ListTags ListUsers RebootBroker UpdateBroker UpdateConfiguration UpdateUser / }
1;
### main pod documentation begin ###
=head1 NAME
Paws::MQ - Perl Interface to AWS AmazonMQ
=head1 SYNOPSIS
use Paws;
my $obj = Paws->service('MQ');
my $res = $obj->Method(
Arg1 => $val1,
Arg2 => [ 'V1', 'V2' ],
# if Arg3 is an object, the HashRef will be used as arguments to the constructor
# of the arguments type
Arg3 => { Att1 => 'Val1' },
# if Arg4 is an array of objects, the HashRefs will be passed as arguments to
# the constructor of the arguments type
Arg4 => [ { Att1 => 'Val1' }, { Att1 => 'Val2' } ],
);
=head1 DESCRIPTION
Amazon MQ is a managed message broker service for Apache ActiveMQ that
makes it easy to set up and operate message brokers in the cloud. A
message broker allows software applications and components to
communicate using various programming languages, operating systems, and
formal messaging protocols.
For the AWS API documentation, see L<https://docs.aws.amazon.com/amazon-mq/>
=head1 METHODS
=head2 CreateBroker
=over
=item [AutoMinorVersionUpgrade => Bool]
=item [BrokerName => Str]
=item [Configuration => L<Paws::MQ::ConfigurationId>]
=item [CreatorRequestId => Str]
=item [DeploymentMode => Str]
=item [EncryptionOptions => L<Paws::MQ::EncryptionOptions>]
=item [EngineType => Str]
=item [EngineVersion => Str]
=item [HostInstanceType => Str]
=item [Logs => L<Paws::MQ::Logs>]
=item [MaintenanceWindowStartTime => L<Paws::MQ::WeeklyStartTime>]
=item [PubliclyAccessible => Bool]
=item [SecurityGroups => ArrayRef[Str|Undef]]
=item [StorageType => Str]
=item [SubnetIds => ArrayRef[Str|Undef]]
=item [Tags => L<Paws::MQ::__mapOf__string>]
=item [Users => ArrayRef[L<Paws::MQ::User>]]
=back
Each argument is described in detail in: L<Paws::MQ::CreateBroker>
Returns: a L<Paws::MQ::CreateBrokerResponse> instance
Creates a broker. Note: This API is asynchronous.
=head2 CreateConfiguration
=over
=item [EngineType => Str]
=item [EngineVersion => Str]
=item [Name => Str]
=item [Tags => L<Paws::MQ::__mapOf__string>]
=back
Each argument is described in detail in: L<Paws::MQ::CreateConfiguration>
Returns: a L<Paws::MQ::CreateConfigurationResponse> instance
Creates a new configuration for the specified configuration name.
Amazon MQ uses the default configuration (the engine type and version).
=head2 CreateTags
=over
=item ResourceArn => Str
=item [Tags => L<Paws::MQ::__mapOf__string>]
=back
Each argument is described in detail in: L<Paws::MQ::CreateTags>
Returns: nothing
Add a tag to a resource.
=head2 CreateUser
=over
=item BrokerId => Str
=item Username => Str
=item [ConsoleAccess => Bool]
=item [Groups => ArrayRef[Str|Undef]]
=item [Password => Str]
=back
Each argument is described in detail in: L<Paws::MQ::CreateUser>
Returns: a L<Paws::MQ::CreateUserResponse> instance
Creates an ActiveMQ user.
=head2 DeleteBroker
=over
=item BrokerId => Str
=back
Each argument is described in detail in: L<Paws::MQ::DeleteBroker>
Returns: a L<Paws::MQ::DeleteBrokerResponse> instance
Deletes a broker. Note: This API is asynchronous.
=head2 DeleteTags
=over
=item ResourceArn => Str
=item TagKeys => ArrayRef[Str|Undef]
=back
Each argument is described in detail in: L<Paws::MQ::DeleteTags>
Returns: nothing
Removes a tag from a resource.
=head2 DeleteUser
=over
=item BrokerId => Str
=item Username => Str
=back
Each argument is described in detail in: L<Paws::MQ::DeleteUser>
Returns: a L<Paws::MQ::DeleteUserResponse> instance
Deletes an ActiveMQ user.
=head2 DescribeBroker
=over
=item BrokerId => Str
=back
Each argument is described in detail in: L<Paws::MQ::DescribeBroker>
Returns: a L<Paws::MQ::DescribeBrokerResponse> instance
Returns information about the specified broker.
=head2 DescribeBrokerEngineTypes
=over
=item [EngineType => Str]
=item [MaxResults => Int]
=item [NextToken => Str]
=back
Each argument is described in detail in: L<Paws::MQ::DescribeBrokerEngineTypes>
Returns: a L<Paws::MQ::DescribeBrokerEngineTypesResponse> instance
Describe available engine types and versions.
=head2 DescribeBrokerInstanceOptions
=over
=item [EngineType => Str]
=item [HostInstanceType => Str]
=item [MaxResults => Int]
=item [NextToken => Str]
=item [StorageType => Str]
=back
Each argument is described in detail in: L<Paws::MQ::DescribeBrokerInstanceOptions>
Returns: a L<Paws::MQ::DescribeBrokerInstanceOptionsResponse> instance
Describe available broker instance options.
=head2 DescribeConfiguration
=over
=item ConfigurationId => Str
=back
Each argument is described in detail in: L<Paws::MQ::DescribeConfiguration>
Returns: a L<Paws::MQ::DescribeConfigurationResponse> instance
Returns information about the specified configuration.
=head2 DescribeConfigurationRevision
=over
=item ConfigurationId => Str
=item ConfigurationRevision => Str
=back
Each argument is described in detail in: L<Paws::MQ::DescribeConfigurationRevision>
Returns: a L<Paws::MQ::DescribeConfigurationRevisionResponse> instance
Returns the specified configuration revision for the specified
configuration.
=head2 DescribeUser
=over
=item BrokerId => Str
=item Username => Str
=back
Each argument is described in detail in: L<Paws::MQ::DescribeUser>
Returns: a L<Paws::MQ::DescribeUserResponse> instance
Returns information about an ActiveMQ user.
=head2 ListBrokers
=over
=item [MaxResults => Int]
=item [NextToken => Str]
=back
Each argument is described in detail in: L<Paws::MQ::ListBrokers>
Returns: a L<Paws::MQ::ListBrokersResponse> instance
Returns a list of all brokers.
=head2 ListConfigurationRevisions
=over
=item ConfigurationId => Str
=item [MaxResults => Int]
=item [NextToken => Str]
=back
Each argument is described in detail in: L<Paws::MQ::ListConfigurationRevisions>
Returns: a L<Paws::MQ::ListConfigurationRevisionsResponse> instance
Returns a list of all revisions for the specified configuration.
=head2 ListConfigurations
=over
=item [MaxResults => Int]
=item [NextToken => Str]
=back
Each argument is described in detail in: L<Paws::MQ::ListConfigurations>
Returns: a L<Paws::MQ::ListConfigurationsResponse> instance
Returns a list of all configurations.
=head2 ListTags
=over
=item ResourceArn => Str
=back
Each argument is described in detail in: L<Paws::MQ::ListTags>
Returns: a L<Paws::MQ::ListTagsResponse> instance
Lists tags for a resource.
=head2 ListUsers
=over
=item BrokerId => Str
=item [MaxResults => Int]
=item [NextToken => Str]
=back
Each argument is described in detail in: L<Paws::MQ::ListUsers>
Returns: a L<Paws::MQ::ListUsersResponse> instance
Returns a list of all ActiveMQ users.
=head2 RebootBroker
=over
=item BrokerId => Str
=back
Each argument is described in detail in: L<Paws::MQ::RebootBroker>
Returns: a L<Paws::MQ::RebootBrokerResponse> instance
Reboots a broker. Note: This API is asynchronous.
=head2 UpdateBroker
=over
=item BrokerId => Str
=item [AutoMinorVersionUpgrade => Bool]
=item [Configuration => L<Paws::MQ::ConfigurationId>]
=item [EngineVersion => Str]
=item [HostInstanceType => Str]
=item [Logs => L<Paws::MQ::Logs>]
=item [SecurityGroups => ArrayRef[Str|Undef]]
=back
Each argument is described in detail in: L<Paws::MQ::UpdateBroker>
Returns: a L<Paws::MQ::UpdateBrokerResponse> instance
Adds a pending configuration change to a broker.
=head2 UpdateConfiguration
=over
=item ConfigurationId => Str
=item [Data => Str]
=item [Description => Str]
=back
Each argument is described in detail in: L<Paws::MQ::UpdateConfiguration>
Returns: a L<Paws::MQ::UpdateConfigurationResponse> instance
Updates the specified configuration.
=head2 UpdateUser
=over
=item BrokerId => Str
=item Username => Str
=item [ConsoleAccess => Bool]
=item [Groups => ArrayRef[Str|Undef]]
=item [Password => Str]
=back
Each argument is described in detail in: L<Paws::MQ::UpdateUser>
Returns: a L<Paws::MQ::UpdateUserResponse> instance
Updates the information for an ActiveMQ user.
=head1 PAGINATORS
Paginator methods are helpers that repetively call methods that return partial results
=head2 ListAllBrokers(sub { },[MaxResults => Int, NextToken => Str])
=head2 ListAllBrokers([MaxResults => Int, NextToken => Str])
If passed a sub as first parameter, it will call the sub for each element found in :
- BrokerSummaries, passing the object as the first parameter, and the string 'BrokerSummaries' as the second parameter
If not, it will return a a L<Paws::MQ::ListBrokersResponse> instance with all the C<param>s; from all the responses. Please take into account that this mode can potentially consume vasts ammounts of memory.
=head1 SEE ALSO
This service class forms part of L<Paws>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 21.696925 | 385 | 0.704906 |
ed4e37107ea541e27ba8ad597ec93845b7b99929 | 1,771 | pm | Perl | t/TestLib.pm | gflohr/Parse-Wisent | cb6c64456e6b2e8925ecf54b00340da6895f40fd | [
"WTFPL"
] | null | null | null | t/TestLib.pm | gflohr/Parse-Wisent | cb6c64456e6b2e8925ecf54b00340da6895f40fd | [
"WTFPL"
] | 11 | 2018-02-06T08:19:53.000Z | 2020-09-11T13:30:07.000Z | t/TestLib.pm | gflohr/Parse-Wisent | cb6c64456e6b2e8925ecf54b00340da6895f40fd | [
"WTFPL"
] | null | null | null | # Copyright (C) 2018 Guido Flohr <[email protected]>,
# all rights reserved.
# This program is free software. It comes without any warranty, to
# the extent permitted by applicable law. You can redistribute it
# and/or modify it under the terms of the Do What the Fuck You Want
# to Public License, Version 2, as published by Sam Hocevar. See
# http://www.wtfpl.net/ for more details.
use strict;
use vars qw(@ISA @EXPORT_OK);
use Test::More;
use File::Spec;
use Parse::Kalex;
use lib '.';
@ISA = qw(Exporter);
@EXPORT_OK = qw(create_lexer assert_location);
sub create_lexer {
my ($name, %options) = @_;
my $lfile = File::Spec->catfile('t', 'scanners', $name . '.l');
my $scanner_file = File::Spec->catfile('t', 'scanners', $name . '.pm');
unlink $scanner_file;
my $scanner = Parse::Kalex->new({outfile => $scanner_file,
package => $name, %options},
$lfile);
ok $scanner, "$name new";
ok $scanner->scan, "$name scan";
ok $scanner->output, "$name output";
ok -e $scanner_file, "$name -> $scanner_file";
return $scanner_file if delete $options{x_no_require};
ok require $scanner_file, "$name -> require $scanner_file";
ok((unlink $scanner_file), "$name -> unlink $scanner_file");
my $lexer = $name->new;
ok $lexer, "$name constructor";
return $lexer;
}
sub assert_location {
my ($lexer, $test, @expect) = @_;
my @location = $lexer->yylocation;
my $name = ref $lexer;
is $location[0], $expect[0], "$name $test from_line";
is $location[1], $expect[1], "$name $test from_column";
is $location[2], $expect[2], "$name $test to_line";
is $location[3], $expect[3], "$name $test to_column";
}
| 28.564516 | 75 | 0.618295 |
edd12bb859090c0c8b61979df605019da8694881 | 3,149 | pl | Perl | scripts/Step_12a_Make_Ingenuity_genesets.pl | mgosink/ToxReporter | b10579978e516c059dfcf585db322ae309a48e70 | [
"MIT"
] | null | null | null | scripts/Step_12a_Make_Ingenuity_genesets.pl | mgosink/ToxReporter | b10579978e516c059dfcf585db322ae309a48e70 | [
"MIT"
] | null | null | null | scripts/Step_12a_Make_Ingenuity_genesets.pl | mgosink/ToxReporter | b10579978e516c059dfcf585db322ae309a48e70 | [
"MIT"
] | null | null | null | #!/usr/bin/perl -I..
################################################################
#
# Author(s) - Mark M. Gosink, Ph.D.
# Company - Pfizer Inc.
#
# Creation Date - Thu Mar 5, 2009
# Modified -
#
# Function - Take output of 'pathwaylist' Ingenuity integration
# module and create human, mouse, & rat pathway genesets
# Requires - assumes HTML file contains pathways in rows and
# gene columns in the order: human, mouse, rat
# Input - HTML output from 'pathwaylist'
# Output - FILE: 'Organism_Name'_Ingenuity.gene_sets
#
################################################################
use Text::CSV;
# Required Files
$ingenuity_file = $SetupFiles{ING_PATHWAYS};
# check that the Ingenuity file looks OK
if ((not(-s $ingenuity_file)) || (not(-T $ingenuity_file))) {
die "\n\tRequired Ingenuity pathways file not found or in wrong format at '$ingenuity_file'!\n\n";
}
$file_listing = `ls -l $ingenuity_file`;
chomp($file_listing);
$date = `date`;
chomp($date);
log_err("Running '$0' on '$date'.");
my $desc = "#\tIngenuity Pathways Genesets\n";
$desc .= "#\t" . $0 . ' run on ' . $date . "\n";
$desc .= "#\tusing '" . $ingenuity_file . "'\n";
$desc .= "#\t\tfile listing at runtime '" . $file_listing . "'\n\n";
open(OUTFILE, ">./tmp_files/Mouse_Ingenuity.gene_sets");
print OUTFILE $desc;
my $csv = Text::CSV->new ( { binary => 1 } ) # should set binary attribute.
or die "Cannot use CSV: ".Text::CSV->error_diag ();
open($fh, "<:encoding(utf8)", $ingenuity_file);
binmode STDOUT, ":utf8";
while ($csv->getline( $fh )) {
my @Columns = $csv->fields();
my $id_data = $Columns[0];
$id_data =~ s/.*(ING:[A-Za-z0-9]+).*/$1/;
my $current_path_id = utf_to_html($id_data);
my $current_pathname = utf_to_html($Columns[1]);
my $current_pathtype = utf_to_html($Columns[2]);
my $current_pathentities = utf_to_html($Columns[3]);
my $human = utf_to_html($Columns[4]);
my $mouse = utf_to_html($Columns[5]);
my $rat = utf_to_html($Columns[6]);
$mouse =~ s/\s+//g;
$mouse =~ s/,/\t/g;
print OUTFILE "$current_path_id\t$current_pathname\t$mouse\n";
}
close($fh);
close(OUTFILE);
$date = `date`;
chomp($date);
log_err("Finished '$0' on '$date'.");
exit;
sub utf_to_html {
my $term = $_[0];
$term =~ s/\x{03B1}/α/g; # alpha
$term =~ s/\x{03B2}/ϐ/g; # beta
$term =~ s/\x{0394}/Δ/g; # delta, upper case
$term =~ s/\x{03B4}/δ/g; # delta, lower case
$term =~ s/\x{03B7}/η/g; # eta
$term =~ s/\x{03B9}/ι/g; # iota
$term =~ s/\x{03B3}/γ/g; # gamma
$term =~ s/\x{03BA}/κ/g; # kappa
$term =~ s/\x{03BB}/λ/g; #
$term =~ s/\x{03B5}/ε/g; # epsilon
$term =~ s/\x{03C9}/ω/g; # omega
$term =~ s/\x{03C3}/σ/g; # sigma
$term =~ s/\x{03B8}/θ/g; # theta
$term =~ s/\x{03B6}/ζ/g; # zeta
$term =~ s/\s+/ /g; # remove extra spaces
$term =~ s/\x{2424}//g; # remove newline
$term =~ s/\x{000A}//g; # remove line feed
$term =~ s/\x{2013}/-/g;
$term =~ s/[^[:ascii:]]+/***FOOBAR***/g;
return $term;
}
sub log_err {
my $msg = $_[0];
my $log_file = $0 . ".log";
open (LOG, ">>$log_file");
print LOG "$msg\n";
close(LOG);
}
| 30.278846 | 99 | 0.577961 |
edc3d6a3054950d4b26a4dbe316c01857ade2926 | 6,144 | pm | Perl | lib/Perlbal/Plugin/StickySessions.pm | git-the-cpan/perlbal-plugin-stickysessions | f7e94b40dbf633b29e6a38d4fcb82d5bbe3fb6da | [
"Artistic-1.0-cl8"
] | null | null | null | lib/Perlbal/Plugin/StickySessions.pm | git-the-cpan/perlbal-plugin-stickysessions | f7e94b40dbf633b29e6a38d4fcb82d5bbe3fb6da | [
"Artistic-1.0-cl8"
] | null | null | null | lib/Perlbal/Plugin/StickySessions.pm | git-the-cpan/perlbal-plugin-stickysessions | f7e94b40dbf633b29e6a38d4fcb82d5bbe3fb6da | [
"Artistic-1.0-cl8"
] | null | null | null | package Perlbal::Plugin::StickySessions;
use Perlbal;
use strict;
use warnings;
use Data::Dumper;
use HTTP::Date;
use CGI qw/:standard/;
use CGI::Cookie;
use Scalar::Util qw(blessed reftype);
# LOAD StickySessions
# SET plugins = stickysessions
#
# Add
# my $svc = $self->{service};
# if(ref($svc) && UNIVERSAL::can($svc,'can')) {
# $svc->run_hook('modify_response_headers', $self);
# }
# To sub handle_response in BackendHTTP after Content-Length is set.
#
sub load {
my $class = shift;
return 1;
}
sub unload {
my $class = shift;
return 1;
}
sub get_backend_id {
my $be = shift;
for ( my $i = 0 ; $i <= $#{ $be->{ service }->{ pool }->{ nodes } } ; $i++ )
{
my ( $nip, $nport ) = @{ $be->{ service }->{ pool }->{ nodes }[$i] };
my $nipport = $nip . ':' . $nport;
return $i + 1 if ( $nipport eq $be->{ ipport } );
}
# default to the first backend in the node list.
return 1;
}
sub decode_server_id {
my $id = shift;
return ( $id - 1 );
}
sub get_ipport {
my ( $svc, $req ) = @_;
my $cookie = $req->header('Cookie');
my %cookies = ();
my $ipport = undef;
%cookies = parse CGI::Cookie($cookie) if defined $cookie;
if ( defined $cookie && defined $cookies{ 'X-SERVERID' } ) {
my $val =
$svc->{ pool }
->{ nodes }[ decode_server_id( $cookies{ 'X-SERVERID' }->value ) ];
my ( $ip, $port ) = @{ $val } if defined $val;
$ipport = $ip . ':' . $port;
}
return $ipport;
}
sub find_or_get_new_backend {
my ( $svc, $req, $client ) = @_;
my Perlbal::BackendHTTP $be;
my $ipport = get_ipport( $svc, $req );
my $now = time;
while ( $be = shift @{ $svc->{ bored_backends } } ) {
next if $be->{ closed };
# now make sure that it's still in our pool, and if not, close it
next unless $svc->verify_generation($be);
# don't use connect-ahead connections when we haven't
# verified we have their attention
if ( !$be->{ has_attention } && $be->{ create_time } < $now - 5 ) {
$be->close("too_old_bored");
next;
}
# don't use keep-alive connections if we know the server's
# just about to kill the connection for being idle
if ( $be->{ disconnect_at } && $now + 2 > $be->{ disconnect_at } ) {
$be->close("too_close_disconnect");
next;
}
# give the backend this client
if ( defined $ipport ) {
if ( $be->{ ipport } eq $ipport ) {
if ( $be->assign_client($client) ) {
$svc->spawn_backends;
return 1;
}
}
} else {
if ( $be->assign_client($client) ) {
$svc->spawn_backends;
return 1;
}
}
# assign client can end up closing the connection, so check for that
return 1 if $client->{ closed };
}
return 0;
}
# called when we're being added to a service
sub register {
my ( $class, $gsvc ) = @_;
my $check_cookie_hook = sub {
my Perlbal::ClientProxy $client = shift;
my Perlbal::HTTPHeaders $req = $client->{ req_headers };
return 0 unless defined $req;
my $svc = $client->{ service };
# we define were to send the client request
$client->{ backend_requested } = 1;
$client->state('wait_backend');
return unless $client && !$client->{ closed };
if ( find_or_get_new_backend( $svc, $req, $client ) != 1 ) {
push @{ $svc->{ waiting_clients } }, $client;
$svc->{ waiting_client_count }++;
$svc->{ waiting_client_map }{ $client->{ fd } } = 1;
my $ipport = get_ipport( $svc, $req );
if ( defined($ipport) ) {
my ( $ip, $port ) = split( /\:/, $ipport );
$svc->{ spawn_lock } = 1;
my $be =
Perlbal::BackendHTTP->new( $svc, $ip, $port,
{ pool => $svc->{ pool } } );
$svc->{ spawn_lock } = 0;
} else {
$svc->spawn_backends;
}
$client->tcp_cork(1);
}
return 0;
};
my $set_cookie_hook = sub {
my Perlbal::BackendHTTP $be = shift;
my Perlbal::HTTPHeaders $hds = $be->{ res_headers };
my Perlbal::HTTPHeaders $req = $be->{ req_headers };
return 0 unless defined $be && defined $hds;
my $svc = $be->{ service };
my $cookie = $req->header('Cookie');
my %cookies = ();
%cookies = parse CGI::Cookie($cookie) if defined $cookie;
my $backend_id = get_backend_id($be);
if ( !defined( $cookies{ 'X-SERVERID' } )
|| $cookies{ 'X-SERVERID' }->value != $backend_id )
{
my $backend_cookie =
new CGI::Cookie( -name => 'X-SERVERID', -value => $backend_id );
if ( defined $hds->header('set-cookie') ) {
my $val = $hds->header('set-cookie');
$hds->header( 'Set-Cookie',
$val .= "\r\nSet-Cookie: " . $backend_cookie->as_string );
} else {
$hds->header( 'Set-Cookie', $backend_cookie );
}
}
return 0;
};
$gsvc->register_hook( 'StickySessions', 'start_proxy_request',
$check_cookie_hook );
$gsvc->register_hook( 'StickySessions', 'modify_response_headers',
$set_cookie_hook );
return 1;
}
# called when we're no longer active on a service
sub unregister {
my ( $class, $svc ) = @_;
$svc->unregister_hooks('StickySessions');
$svc->unregister_setters('StickySessions');
return 1;
}
1;
=head1 NAME
Perlbal::Plugin::StickySessions - session affinity for perlbal
=head1 SYNOPSIS
This plugin provides a Perlbal the ability to load balance with
session affinity.
You *must* patch Perlbal for this plugin to work correctly.
Configuration as follows:
LOAD StickySessions
SET plugins = stickysessions
=cut
| 27.306667 | 80 | 0.525553 |
ed565be5e5c61f5b5a73da35ed4836412997fdfc | 8,025 | pl | Perl | models/lnd/clm/src/main/findHistFields.pl | E3SM-Project/iESM | 2a1013a3d85a11d935f1b2a8187a8bbcd75d115d | [
"BSD-3-Clause-LBNL"
] | 9 | 2018-05-15T02:10:40.000Z | 2020-01-10T18:27:31.000Z | models/lnd/clm/src/main/findHistFields.pl | zhangyue292/iESM | 2a1013a3d85a11d935f1b2a8187a8bbcd75d115d | [
"BSD-3-Clause-LBNL"
] | 3 | 2018-10-12T18:41:56.000Z | 2019-11-12T15:18:49.000Z | models/lnd/clm/src/main/findHistFields.pl | zhangyue292/iESM | 2a1013a3d85a11d935f1b2a8187a8bbcd75d115d | [
"BSD-3-Clause-LBNL"
] | 3 | 2018-05-15T02:10:33.000Z | 2021-04-06T17:45:49.000Z | #!/usr/bin/env perl
#
# This perl script reads in the histFldsMod.F90 file to find the total list of history
# fields that can be added for this model version, regardless of namelist options, or
# CPP processing.
#
use strict;
#use warnings;
#use diagnostics;
use Cwd;
use English;
use Getopt::Long;
use IO::File;
use File::Glob ':glob';
# Set the directory that contains the CLM configuration scripts. If the command was
# issued using a relative or absolute path, that path is in $ProgDir. Otherwise assume
# the
# command was issued from the current working directory.
(my $ProgName = $0) =~ s!(.*)/!!; # name of this script
my $ProgDir = $1; # name of directory containing this script -- may be a
# relative or absolute path, or null if the script
# is in
# the user's PATH
my $cmdline = "@ARGV"; # Command line arguments to script
my $cwd = getcwd(); # current working directory
my $cfgdir; # absolute pathname of directory that contains this script
my $nm = "${ProgName}::"; # name to use if script dies
if ($ProgDir) {
$cfgdir = $ProgDir;
} else {
$cfgdir = $cwd;
}
# The namelist definition file contains entries for all namelist variables that
# can be output by build-namelist.
my $nl_definition_file = "$cfgdir/../../bld/namelist_files/namelist_definition.xml";
(-f "$nl_definition_file") or die <<"EOF";
** $ProgName - Cannot find namelist definition file \"$nl_definition_file\" **
EOF
print "Using namelist definition file $nl_definition_file\n";
# The Build::NamelistDefinition module provides utilities to get the list of
# megan compounds
#The root directory to cesm utils Tools
my $cesm_tools = "$cfgdir/../../../../../scripts/ccsm_utils/Tools";
(-f "$cesm_tools/perl5lib/Build/NamelistDefinition.pm") or die <<"EOF";
** $ProgName - Cannot find perl module \"Build/NamelistDefinition.pm\" in directory
\"$cesm_tools/perl5lib\" **
EOF
# Add $cfgdir/perl5lib to the list of paths that Perl searches for modules
my @dirs = ( $cfgdir, "$cesm_tools/perl5lib");
unshift @INC, @dirs;
require Build::NamelistDefinition;
# Create a namelist definition object. This object provides a method for verifying that
# the
# output namelist variables are in the definition file, and are output in the correct
# namelist groups.
my $definition = Build::NamelistDefinition->new($nl_definition_file);
my $mxname = 0;
my $mxlongn = 0;
my %fields;
my $fldnamevar = "fieldname_var";
sub matchKeyword {
#
# Match a keyword
#
my $keyword = shift;
my $line = shift;
my $fh = shift;
my $match = undef;
if ( $line =~ /$keyword/ ) {
if ( $line =~ /$keyword\s*=\s*['"]([^'"]+)['"]/ ) {
$match = $1;
} elsif ( $line =~ /$keyword\s*=\s*&\s*$/ ) {
$line = <$fh>;
if ( $line =~ /^\s*['"]([^'"]+)['"]/ ) {
$match = $1;
} else {
die "ERROR: Trouble getting keyword string\n Line: $line";
}
} else {
if ( $line =~ /fname\s*=\s*fieldname/ ) {
print STDERR "Found variable used for fieldname = $line\n";
$match = $fldnamevar;
} elsif ( $line =~ /fname\s*=\s*trim\(fname\)/ ) {
$match = undef;
} elsif ( $line =~ /units\s*=\s*units/ ) {
$match = undef;
} elsif ( $line =~ /long_name\s*=\s*long_name/ ) {
$match = undef;
} elsif ( $line =~ /long_name\s*=\s*longname/ ) {
print STDERR "Found variable used for longname = $line\n";
$match = "longname_var";
} else {
die "ERROR: Still have a match on $keyword\n Line: $line";
}
}
}
return( $match );
}
sub getFieldInfo {
#
# Get field Information
#
my $fh = shift;
my $line = shift;
my $fname = undef;
my $units = undef;
my $longn = undef;
my $endin = undef;
do {
if ( $line =~ /MEG_/ ) {
$line =~ s|'//'_'|_'|g;
$line =~ s|'//trim\(meg_cmp\%name\)|megancmpd'|gi;
if ( $line =~ /meg_cmp\%name/ ) {
die "ERROR: Still have meg_cmp in a line\n";
}
}
if ( ! defined($fname) ) {
$fname = &matchKeyword( "fname", $line, $fh );
}
if ( ! defined($units) ) {
$units = &matchKeyword( "units", $line, $fh );
}
if ( ! defined($longn) ) {
$longn = &matchKeyword( "long_name", $line, $fh );
}
if ( $line =~ /\)\s*$/ ) {
$endin = 1;
}
if ( ! defined($endin) ) { $line = <$fh>; }
} until( (defined($fname) && defined($units) && defined($longn)) ||
! defined($line) || defined($endin) );
if ( ! defined($fname) ) {
die "ERROR: name undefined for field ending with: $line\n";
}
return( $fname, $longn, $units );
}
sub setField {
#
# Set the field
#
my $name = shift;
my $longn = shift;
my $units = shift;
if ( defined($name) && $name ne $fldnamevar ) {
if ( length($name) > $mxname ) { $mxname = length($name); }
if ( length($longn) > $mxlongn ) { $mxlongn = length($longn); }
my $len;
if ( length($longn) > 90 ) {
$len = 110;
} elsif ( length($longn) > 60 ) {
$len = 90;
} else {
$len = 60;
}
$fields{$name} = sprintf( "%-${len}s\t(%s)", $longn, $units );
}
}
sub XML_Header {
#
# Write out header to history fields file
#
my $outfh = shift;
my $outfilename = shift;
my $filename = shift;
print STDERR " Write out header to history fields file to: $outfilename\n";
my $svnurl = '$URL: https://svn-ccsm-models.cgd.ucar.edu/clm2/branch_tags/iesm11_tags/iesm11_21_cesm1_1_2_rel_n03_clm4_0_54/models/lnd/clm/src/main/findHistFields.pl $';
my $svnid = '$Id: findHistFields.pl 40455 2012-09-21 21:33:45Z muszala $';
print $outfh <<"EOF";
<?xml version="1.0"?>
\<\?xml-stylesheet type="text\/xsl" href="history_fields.xsl"\?\>
\<\!--
List of history file field names, long-names and units for all the fields output
by CLM. This was created by reading in the file: $filename
SVN version information:
$svnurl
$svnid
--\>
\<history_fields\>
EOF
}
sub XML_Footer {
#
# Write out footer to history fields file
#
my $outfh = shift;
print STDERR " Write out footer to history fields file\n";
print $outfh "\n</history_fields>\n";
}
my $pwd = `pwd`;
chomp( $pwd );
my $filename = "$pwd/histFldsMod.F90";
my $fh = IO::File->new($filename, '<') or die "** $ProgName - can't open history Fields file: $filename\n";
my @megcmpds = $definition->get_valid_values( "megan_cmpds", 'noquotes'=>1 );
#
# Read in the list of fields from the source file
# And output to an XML file
#
my $outfilename = "$pwd/../../bld/namelist_files/history_fields.xml";
my $outfh = IO::File->new($outfilename, '>') or die "** $ProgName - can't open output history Fields XML file: $outfilename\n";
&XML_Header( $outfh, $outfilename, $filename );
while (my $line = <$fh>) {
# Comments
if ($line =~ /(.*)\!/) {
$line = $1;
}
my $format = "\n<field name='%s' units='%s'\n long_name='%s'\n/>\n";
if ($line =~ /call\s*hist_addfld/i ) {
(my $name, my $longn, my $units) = &getFieldInfo( $fh, $line );
if ( $name ne "MEG_megancmpd" ) {
&setField( $name, $longn, $units );
printf( $outfh $format, $name, $units, $longn );
} else {
foreach my $megcmpd ( @megcmpds ) {
my $name = "MEG_${megcmpd}";
&setField( $name, $longn, $units );
printf( $outfh $format, $name, $units, $longn );
}
}
}
}
close( $fh );
&XML_Footer( $outfh );
close( $outfh );
print STDERR " mxname = $mxname\n";
print STDERR " mxlongn = $mxlongn\n";
#
# List the fields in a neatly ordered list
#
foreach my $name ( sort(keys(%fields)) ) {
my $len;
if ( length($name) > 20 ) {
$len = 40;
} else {
$len = 20;
}
printf( "%-${len}s = %s\n", $name, $fields{$name} );
}
| 30.169173 | 171 | 0.577321 |
edc561fd465f49345e8fae2a8e0aae8c7830cdc5 | 224 | t | Perl | t/00-load.t | ThisUsedToBeAnEmail/Moonshine-Util | 7e9e7eacda7ee73174f7c4c7ef4147c6b51ba0a5 | [
"Artistic-2.0",
"Unlicense"
] | null | null | null | t/00-load.t | ThisUsedToBeAnEmail/Moonshine-Util | 7e9e7eacda7ee73174f7c4c7ef4147c6b51ba0a5 | [
"Artistic-2.0",
"Unlicense"
] | null | null | null | t/00-load.t | ThisUsedToBeAnEmail/Moonshine-Util | 7e9e7eacda7ee73174f7c4c7ef4147c6b51ba0a5 | [
"Artistic-2.0",
"Unlicense"
] | null | null | null | #!perl -T
use 5.006;
use strict;
use warnings;
use Test::More;
plan tests => 1;
BEGIN {
use_ok( 'Moonshine::Util' ) || print "Bail out!\n";
}
diag( "Testing Moonshine::Util $Moonshine::Util::VERSION, Perl $], $^X" );
| 16 | 74 | 0.620536 |
edc91c913feaf0fe5e4d95f04c1889e151f03aee | 4,597 | t | Perl | modules/t/rangeRegistry.t | arnaudxk/ensembl | 2baa0f180dde74e9d89b7ac0495d15c79bc63ff3 | [
"Apache-2.0"
] | null | null | null | modules/t/rangeRegistry.t | arnaudxk/ensembl | 2baa0f180dde74e9d89b7ac0495d15c79bc63ff3 | [
"Apache-2.0"
] | null | null | null | modules/t/rangeRegistry.t | arnaudxk/ensembl | 2baa0f180dde74e9d89b7ac0495d15c79bc63ff3 | [
"Apache-2.0"
] | 1 | 2016-02-17T09:50:07.000Z | 2016-02-17T09:50:07.000Z | # Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
use strict;
use warnings;
use Test::More;
use Bio::EnsEMBL::Test::TestUtils;
use Bio::EnsEMBL::Mapper::RangeRegistry;
our $verbose= 0;
my $rr = Bio::EnsEMBL::Mapper::RangeRegistry->new();
my $id = 'ID1';
#expect [100,400] back
my $range = $rr->check_and_register($id, 200,300, 100,400);
ok(@$range==1 && $range->[0]->[0] == 100 && $range->[0]->[1] == 400);
print_ranges($range);
#expect undef back
$range = $rr->check_and_register($id, 150,190, 100,200);
ok(!defined($range));
print_ranges($range);
#expect [401,500] back
$range = $rr->check_and_register($id, 200, 500);
ok(@$range==1 && $range->[0]->[0] == 401 && $range->[0]->[1] == 500);
print_ranges($range);
#expect undef back
$range = $rr->check_and_register($id, 300, 500);
ok(!defined($range));
print_ranges($range);
#expect 700-900 back
$range = $rr->check_and_register($id, 700, 900);
ok(@$range==1 && $range->[0]->[0] == 700 && $range->[0]->[1] == 900);
print_ranges($range);
# expect 1000-1200 back
$range = $rr->check_and_register($id, 1050, 1150, 1000, 1200);
ok(@$range==1 && $range->[0]->[0] == 1000 && $range->[0]->[1] == 1200);
print_ranges($range);
#expect 50-99, 501-699, 901-950 back
$range = $rr->check_and_register($id, 50, 200, 50, 950);
ok(@$range==3 && $range->[0]->[0] == 50 && $range->[0]->[1] == 99);
ok(@$range==3 && $range->[1]->[0] == 501 && $range->[1]->[1] == 699);
ok(@$range==3 && $range->[2]->[0] == 901 && $range->[2]->[1] == 950);
print_ranges($range);
#make sure that the interal list has been merged into 2 ranges
#we have to do this to make sure that it is efficient
my $internal_list = $rr->{'registry'}->{$id};
ok(@$internal_list == 2);
#check that creating adjacent regions merges the list correctly
$range = $rr->check_and_register($id, 40,45,30,49);
ok(@$internal_list == 2);
ok(@$range==1 && $range->[0]->[0] == 30 && $range->[0]->[1] == 49);
print_ranges($range);
$range = $rr->check_and_register($id, 951, 999);
ok(@$internal_list == 1);
ok($range && $range->[0]->[0] == 951 && $range->[0]->[1] == 999);
print_ranges($range);
# Check that a single range can be added to the beginning
$range = $rr->check_and_register($id, 1, 10, 1,20);
ok(@$internal_list == 2);
ok(@$range==1 && $range->[0]->[0] == 1 && $range->[0]->[1] == 20);
print_ranges($range);
#check range that spans entire existing ranges
$range = $rr->check_and_register($id, 1, 1200);
ok(@$internal_list == 1);
ok(@$range==1 && $range->[0]->[0] == 21 && $range->[0]->[1] == 29);
print_ranges($range);
#check adding identical range to existing internal range
$range = $rr->check_and_register($id, 1, 1200);
ok(!defined($range));
print_ranges($range);
#check requesting small area of size 1
$range = $rr->check_and_register($id,10,10, 1, 1e6);
ok(!defined($range));
print_ranges($range);
#check that adding a range to a different id works
$range = $rr->check_and_register("ID2", 100,500, 1, 600);
ok($range && @$range==1 && $range->[0]->[0] == 1 && $range->[0]->[1] == 600);
print_ranges($range);
# I suspect there is a small bug in respect to handling the
# second argument extended range
# setup some ranges
$range = $rr->check_and_register( "rr_bug", 2,10 );
$range = $rr->check_and_register( "rr_bug", 15,20 );
$range = $rr->check_and_register( "rr_bug", 25,30 );
my $overlap = $rr->overlap_size( "rr_bug", 3, 40 );
ok( $overlap == 20 );
$range = $rr->check_and_register( "rr_bug", 28, 35, 3, 40 );
debug( "*** extended bug test ***" );
print_ranges( $range );
# this should result in 2,40 to be covered in the range registry
ok(@$range==3 && $range->[0]->[0] == 11 && $range->[0]->[1] == 14);
ok(@$range==3 && $range->[1]->[0] == 21 && $range->[1]->[1] == 24);
ok(@$range==3 && $range->[2]->[0] == 31 && $range->[2]->[1] == 40);
sub print_ranges {
my $rangelist = shift;
if(!defined($rangelist)) {
debug("UNDEF");
return;
}
foreach my $range (@$rangelist) {
debug('['.$range->[0].'-'.$range->[1].']');
}
}
done_testing();
| 31.272109 | 102 | 0.630629 |
ed912da9605d120d189e558d3bf7340097e23e6c | 1,053 | pl | Perl | categories/perlmonks/combinations-731808.pl | amit1999999/amit1 | 658755cf1fbfeaebc82124a93054e0695d087d1a | [
"Artistic-2.0"
] | 198 | 2015-01-07T17:07:56.000Z | 2019-12-26T17:15:33.000Z | categories/perlmonks/combinations-731808.pl | amit1999999/amit1 | 658755cf1fbfeaebc82124a93054e0695d087d1a | [
"Artistic-2.0"
] | 44 | 2015-01-23T08:11:58.000Z | 2020-01-12T13:18:02.000Z | categories/perlmonks/combinations-731808.pl | amit1999999/amit1 | 658755cf1fbfeaebc82124a93054e0695d087d1a | [
"Artistic-2.0"
] | 84 | 2015-01-19T04:39:20.000Z | 2019-12-18T18:02:45.000Z | use v6;
=begin pod
=TITLE Combinations
=AUTHOR Eric Hodges
Specification:
From L<http://www.perlmonks.org/?node_id=731808>
Given a list of URL prefixes, and a list of product IDs, make a list
consisting of each URL prefix concatenated with each product ID.
=end pod
my @urls = ('http://www.something.com/blah.aspx?code=',
'http://www.somethingelse.com/stuff.aspx?thing=');
my @ids = <375035304 564564774 346464646>;
# 1. Cross then map
# We use the cross operator X to make every combination of pairs from @urls
# and @ids. We then use map to stringify each pair. $^a is a "placeholder
# argument" - in this case, it refers to the only argument to the block.
my @combined = (@urls X @ids).map: { ~$^a };
.say for @combined;
# 2. cross hyperoperator
# We use the cross hyperoperator X~
# This combines each element from list1 with each element from list2 using ~
# You can use any infix operator.
# Try (1,2,3) X* (1,2,3) to generate a multiplication table.
.say for @urls X~ @ids;
# vim: expandtab shiftwidth=4 ft=perl6
| 25.682927 | 76 | 0.705603 |
edd8cbbd878fa76aa165645886170afd86a0d6cd | 342 | al | Perl | tools/perl/lib/auto/posix/siglongjmp.al | npocmaka/Windows-Server-2003 | 5c6fe3db626b63a384230a1aa6b92ac416b0765f | [
"Unlicense"
] | 17 | 2020-11-13T13:42:52.000Z | 2021-09-16T09:13:13.000Z | tools/perl/lib/auto/posix/siglongjmp.al | sancho1952007/Windows-Server-2003 | 5c6fe3db626b63a384230a1aa6b92ac416b0765f | [
"Unlicense"
] | 2 | 2020-10-19T08:02:06.000Z | 2020-10-19T08:23:18.000Z | tools/perl/lib/auto/posix/siglongjmp.al | sancho1952007/Windows-Server-2003 | 5c6fe3db626b63a384230a1aa6b92ac416b0765f | [
"Unlicense"
] | 14 | 2020-11-14T09:43:20.000Z | 2021-08-28T08:59:57.000Z | # NOTE: Derived from ..\..\lib\POSIX.pm.
# Changes made here will be lost when autosplit is run again.
# See AutoSplit.pm.
package POSIX;
#line 206 "..\..\lib\POSIX.pm (autosplit into ..\..\lib\auto\POSIX\siglongjmp.al)"
sub siglongjmp {
unimpl "siglongjmp() is C-specific: use die instead";
}
# end of POSIX::siglongjmp
1;
| 26.307692 | 83 | 0.663743 |
edc57df54884178247131acd9957c7e92cbaa403 | 3,796 | pm | Perl | hardware/server/hp/bladechassis/snmp/mode/components/fan.pm | cstegm/centreon-plugins | b29bdb670de52a22d3520661dc7b9d2548ae7a1a | [
"Apache-2.0"
] | null | null | null | hardware/server/hp/bladechassis/snmp/mode/components/fan.pm | cstegm/centreon-plugins | b29bdb670de52a22d3520661dc7b9d2548ae7a1a | [
"Apache-2.0"
] | null | null | null | hardware/server/hp/bladechassis/snmp/mode/components/fan.pm | cstegm/centreon-plugins | b29bdb670de52a22d3520661dc7b9d2548ae7a1a | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package hardware::server::hp::bladechassis::snmp::mode::components::fan;
use strict;
use warnings;
my %map_conditions = (
1 => 'other',
2 => 'ok',
3 => 'degraded',
4 => 'failed',
);
my %present_map = (
1 => 'other',
2 => 'absent',
3 => 'present',
4 => 'Weird!!!', # for blades it can return 4, which is NOT spesified in MIB
);
sub check {
my ($self) = @_;
$self->{components}->{fan} = {name => 'fans', total => 0, skip => 0};
$self->{output}->output_add(long_msg => "Checking fans");
return if ($self->check_exclude(section => 'fan'));
my $oid_cpqRackCommonEnclosureFanPresent = '.1.3.6.1.4.1.232.22.2.3.1.3.1.8';
my $oid_cpqRackCommonEnclosureFanIndex = '.1.3.6.1.4.1.232.22.2.3.1.3.1.3';
my $oid_cpqRackCommonEnclosureFanPartNumber = '.1.3.6.1.4.1.232.22.2.3.1.3.1.6';
my $oid_cpqRackCommonEnclosureFanSparePartNumber = '.1.3.6.1.4.1.232.22.2.3.1.3.1.7';
my $oid_cpqRackCommonEnclosureFanCondition = '.1.3.6.1.4.1.232.22.2.3.1.3.1.11';
my $result = $self->{snmp}->get_table(oid => $oid_cpqRackCommonEnclosureFanPresent);
return if (scalar(keys %$result) <= 0);
my @get_oids = ();
my @oids_end = ();
foreach my $key ($self->{snmp}->oid_lex_sort(keys %$result)) {
$key =~ /\.([0-9]+)$/;
my $oid_end = $1;
next if ($present_map{$result->{$key}} ne 'present' &&
$self->absent_problem(section => 'fan', instance => $oid_end));
push @oids_end, $oid_end;
push @get_oids, $oid_cpqRackCommonEnclosureFanIndex . "." . $oid_end, $oid_cpqRackCommonEnclosureFanPartNumber . "." . $oid_end,
$oid_cpqRackCommonEnclosureFanSparePartNumber . "." . $oid_end, $oid_cpqRackCommonEnclosureFanCondition . "." . $oid_end;
}
$result = $self->{snmp}->get_leef(oids => \@get_oids);
foreach (@oids_end) {
my $fan_index = $result->{$oid_cpqRackCommonEnclosureFanIndex . '.' . $_};
my $fan_condition = $result->{$oid_cpqRackCommonEnclosureFanCondition . '.' . $_};
my $fan_part = $result->{$oid_cpqRackCommonEnclosureFanPartNumber . '.' . $_};
my $fan_spare = $result->{$oid_cpqRackCommonEnclosureFanSparePartNumber . '.' . $_};
next if ($self->check_exclude(section => 'fan', instance => $fan_index));
$self->{components}->{fan}->{total}++;
$self->{output}->output_add(long_msg => sprintf("Fan %d condition is %s [part: %s, spare: %s].",
$fan_index, $map_conditions{$fan_condition},
$fan_part, $fan_spare));
my $exit = $self->get_severity(section => 'fan', value => $map_conditions{$fan_condition});
if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
$self->{output}->output_add(severity => $exit,
short_msg => sprintf("Fan %d condition is %s", $fan_index, $map_conditions{$fan_condition}));
}
}
}
1; | 42.651685 | 137 | 0.614594 |
73f60abf4eec30321eff70f53c44eb7925fbe4b2 | 3,390 | pm | Perl | t/lib/t/MusicBrainz/Server/Edit/Artist/Delete.pm | kellnerd/musicbrainz-server | 9e058e10219ea6b8942cfd64160ffe19769f747b | [
"BSD-2-Clause"
] | 577 | 2015-01-15T12:18:50.000Z | 2022-03-16T20:41:57.000Z | t/lib/t/MusicBrainz/Server/Edit/Artist/Delete.pm | kellnerd/musicbrainz-server | 9e058e10219ea6b8942cfd64160ffe19769f747b | [
"BSD-2-Clause"
] | 1,227 | 2015-04-16T01:00:29.000Z | 2022-03-30T15:08:46.000Z | t/lib/t/MusicBrainz/Server/Edit/Artist/Delete.pm | kellnerd/musicbrainz-server | 9e058e10219ea6b8942cfd64160ffe19769f747b | [
"BSD-2-Clause"
] | 280 | 2015-01-04T08:39:41.000Z | 2022-03-10T17:09:59.000Z | package t::MusicBrainz::Server::Edit::Artist::Delete;
use Test::Routine;
use Test::More;
use Test::Fatal;
with 't::Edit';
with 't::Context';
BEGIN { use MusicBrainz::Server::Edit::Artist::Delete }
use MusicBrainz::Server::Constants qw( $EDITOR_MODBOT $EDIT_ARTIST_DELETE );
use MusicBrainz::Server::Constants ':edit_status';
use MusicBrainz::Server::Test qw( accept_edit reject_edit );
test all => sub {
my $test = shift;
my $c = $test->c;
MusicBrainz::Server::Test->prepare_test_database($c, '+edit_artist_delete');
my $artist = $c->model('Artist')->get_by_id(3);
my $edit = create_edit($c, $artist);
isa_ok($edit, 'MusicBrainz::Server::Edit::Artist::Delete');
my ($edits, $hits) = $c->model('Edit')->find({ artist => 3 }, 10, 0);
is($hits, 1);
is($edits->[0]->id, $edit->id);
$artist = $c->model('Artist')->get_by_id(3);
is($artist->edits_pending, 1);
# Test rejecting the edit
reject_edit($c, $edit);
$artist = $c->model('Artist')->get_by_id(3);
ok(defined $artist);
is($artist->edits_pending, 0);
# Test accepting the edit
# This should fail as the artist has a recording linked
$edit = create_edit($c, $artist);
accept_edit($c, $edit);
$artist = $c->model('Artist')->get_by_id(3);
is($edit->status, $STATUS_FAILEDDEP);
ok(defined $artist);
# Delete the recording and enter the edit
my $sql = $c->sql;
Sql::run_in_transaction(sub { $c->model('Recording')->delete(1) }, $sql);
$edit = create_edit($c, $artist);
accept_edit($c, $edit);
$artist = $c->model('Artist')->get_by_id(3);
ok(!defined $artist);
my $ipi_codes = $c->model('Artist')->ipi->find_by_entity_id(3);
is(scalar @$ipi_codes, 0, 'IPI codes for deleted artist removed from database');
my $isni_codes = $c->model('Artist')->isni->find_by_entity_id(3);
is(scalar @$isni_codes, 0, 'ISNI codes for deleted artist removed from database');
};
test 'Can be entered as an auto-edit' => sub {
my $test = shift;
my $c = $test->c;
MusicBrainz::Server::Test->prepare_test_database($c, '+edit_artist_delete');
my $artist = $c->model('Artist')->get_by_id(3);
# Delete the recording and enter the edit
my $sql = $c->sql;
Sql::run_in_transaction(
sub {
$c->model('Recording')->delete(1);
}, $sql);
my $edit = $c->model('Edit')->create(
edit_type => $EDIT_ARTIST_DELETE,
to_delete => $artist,
editor_id => $EDITOR_MODBOT,
privileges => 1
);
isa_ok($edit, 'MusicBrainz::Server::Edit::Artist::Delete');
$artist = $c->model('Artist')->get_by_id(3);
ok(!defined $artist);
};
test 'Edit is failed if artist no longer exists' => sub {
my $test = shift;
my $c = $test->c;
my $artist_row = $c->model('Artist')->insert({
name => 'Gonna B Deleted',
sort_name => 'Gonna B Deleted',
});
my $artist = $c->model('Artist')->get_by_id($artist_row->{id});
my $edit1 = create_edit($c, $artist);
my $edit2 = create_edit($c, $artist);
$edit1->accept;
isa_ok exception { $edit2->accept }, 'MusicBrainz::Server::Edit::Exceptions::FailedDependency';
};
sub create_edit {
my ($c, $artist) = @_;
return $c->model('Edit')->create(
edit_type => $EDIT_ARTIST_DELETE,
to_delete => $artist,
editor_id => 1
);
}
1;
| 29.224138 | 99 | 0.608555 |
eda755c1bf1b5ea95c61a3232f13d0156a25e771 | 9,650 | pl | Perl | src/fasill_arithmetic.pl | jariazavalverde/fasill | cffcd908aa170b4afdd6355be04d40cb3da5187e | [
"BSD-3-Clause"
] | 15 | 2018-11-28T02:48:25.000Z | 2021-05-24T19:06:13.000Z | src/fasill_arithmetic.pl | jariazavalverde/fasill | cffcd908aa170b4afdd6355be04d40cb3da5187e | [
"BSD-3-Clause"
] | null | null | null | src/fasill_arithmetic.pl | jariazavalverde/fasill | cffcd908aa170b4afdd6355be04d40cb3da5187e | [
"BSD-3-Clause"
] | 1 | 2021-02-15T19:25:31.000Z | 2021-02-15T19:25:31.000Z | /* Part of FASILL
Author: José Antonio Riaza Valverde
E-mail: [email protected]
WWW: https://dectau.uclm.es/fasill
Copyright (c) 2018 - 2021, José Antonio Riaza Valverde
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
:- module(fasill_arithmetic, [
arithmetic_evaluation/3,
arithmetic_comparison/3
]).
:- use_module(fasill_exceptions).
:- use_module(fasill_term).
/** <module> Arithmetic
This library provides basic predicates for arithmetic comparison and
arithmetic evaluation.
The general arithmetic predicates all handle expressions. An expression is
either a number or a function. The arguments of a function are expressions.
FASILL defines the following numeric types:
* `integer`: depending on the Prolog system on which FASILL is executed,
integers can be bounded or not. The type of integer support can be
detected using the FASILL flags `bounded`, `min_integer` and
`max_integer`.
* `float`: floating point numbers. On most of today's platforms these are
64-bit IEEE floating point numbers.
*/
%! arithmetic_evaluation(+Indicator, +Expression, ?Result)
%
% This predicate succeeds when Result is the result of evaluating the
% expression Expression. This predicate throws an arithmetical exception if
% there is any problem.
arithmetic_evaluation(Indicator, var(_), _) :-
!,
fasill_exceptions:instantiation_error(Indicator, Error),
fasill_exceptions:throw_exception(Error).
arithmetic_evaluation(_, num(X), num(X)) :-
!.
arithmetic_evaluation(Indicator, term(Op,Args), Result) :-
catch(
( maplist(arithmetic_evaluation(Indicator), Args, Args_),
maplist(arithmetic_type, Args_, Types),
fasill_term:maplist(to_prolog, Args_, Prolog),
arithmetic_op(Op, Prolog, Types, Result),
!
), Error,
(Error = type(Type, From) ->
(fasill_term:from_prolog(From, From_),
fasill_exceptions:type_error(Type, From_, Indicator, Exception),
fasill_exceptions:throw_exception(Exception)) ;
(Error = evaluation(Cause) ->
(fasill_exceptions:evaluation_error(Cause, Indicator, Exception),
fasill_exceptions:throw_exception(Exception)) ;
(Error = exception(Exception) ->
fasill_exceptions:throw_exception(Exception) ;
fasill_exceptions:throw_exception(Error))))).
%! arithmetic_comparison(+Op, +Expression1, +Expression2)
%
% This predicate succeeds when expressions Expression1 and Expression2,
% evaluated as much as possible, fulfill the ordering relation Op.
arithmetic_comparison(Name/2, Expr1, Expr2) :-
arithmetic_evaluation(Name/2, Expr1, Result1),
arithmetic_evaluation(Name/2, Expr2, Result2),
fasill_term:to_prolog(Result1, Result1_),
fasill_term:to_prolog(Result2, Result2_),
call(Name, Result1_, Result2_).
%! arithmetic_type(+Number, ?Type)
%
% This predicate succeeds when Number has the type Type (`integer` or
% `float`).
arithmetic_type(num(X), integer) :-
integer(X).
arithmetic_type(num(X), float) :-
float(X).
%! arithmetic_op(+Operator, +Arguments, +Types, ?Result)
%
% This predicate succeeds when Result is the result of evaluating the
% operator Operator with the arguments Arguments with types Types.
% Pi (constant)
arithmetic_op(pi, [], _, num(Z)) :-
Z is pi.
% E (constant)
arithmetic_op(e, [], _, num(Z)) :-
Z is e.
% Addition
arithmetic_op('+', [X,Y], _, num(Z)) :-
Z is X+Y.
% Subtraction
arithmetic_op('-', [X,Y], _, num(Z)) :-
Z is X-Y.
% Multiplication
arithmetic_op('*', [X,Y], _, num(Z)) :-
Z is X*Y.
% Exponentiation
arithmetic_op('**', [X,Y], _, num(Z)) :-
Z is float(X**Y).
% Division
arithmetic_op('/', [_,0], _, _) :-
!,
throw(evaluation(zero_division)).
arithmetic_op('/', [_,0.0], _, _) :-
!,
throw(evaluation(zero_division)).
arithmetic_op('/', [X,Y], _, num(Z)) :-
Z is float(X/Y).
% Integer division
arithmetic_op('//', [X,_], [float,_], _) :-
throw(type(integer, X)).
arithmetic_op('//', [_,Y], [_,float], _) :-
throw(type(integer, Y)).
arithmetic_op('//', [_,0], _, _) :-
!,
throw(evaluation(zero_division)).
arithmetic_op('//', [_,0.0], _, _) :-
!,
throw(evaluation(zero_division)).
arithmetic_op('//', [X,Y], _, num(Z)) :-
Z is X//Y.
% Unary addition
arithmetic_op('+', [X], _, num(Z)) :-
Z is X.
% Negation
arithmetic_op('-', [X], _, num(Z)) :-
Z is -X.
% Exp
arithmetic_op(exp, [X], _, num(Z)) :-
Z is exp(X).
% Square root
arithmetic_op(sqrt, [X], _, num(Z)) :-
Z is sqrt(X).
% Logarithm
arithmetic_op(log, [X], _, num(Z)) :-
X =< 0 ->
throw(evaluation(undefined)) ;
Z is log(X).
% Trigonometric functions
arithmetic_op(sin, [X], _, num(Z)) :-
Z is sin(X).
arithmetic_op(cos, [X], _, num(Z)) :-
Z is cos(X).
arithmetic_op(tan, [X], _, num(Z)) :-
Z is tan(X).
arithmetic_op(asin, [X], _, num(Z)) :-
Z is asin(X).
arithmetic_op(acos, [X], _, num(Z)) :-
Z is acos(X).
arithmetic_op(atan, [X], _, num(Z)) :-
Z is atan(X).
% Sign
arithmetic_op(sign, [X], _, num(Z)) :-
Z is sign(X).
% Float
arithmetic_op(float, [X], _, num(Z)) :-
Z is float(X).
% Floor
arithmetic_op(floor, [X], [integer], _) :-
throw(type(float, X)).
arithmetic_op(floor, [X], _, num(Z)) :-
Z is floor(X).
% Round
arithmetic_op(round, [X], [integer], _) :-
throw(type(float, X)).
arithmetic_op(round, [X], _, num(Z)) :-
Z is round(X).
% Truncate
arithmetic_op(truncate, [X], [integer], _) :-
throw(type(float, X)).
arithmetic_op(truncate, [X], _, num(Z)) :-
Z is truncate(X).
% Ceiling
arithmetic_op(ceiling, [X], [integer], _) :-
throw(type(float, X)).
arithmetic_op(ceiling, [X], _, num(Z)) :-
Z is ceiling(X).
% Integer part
arithmetic_op(float_integer_part, [X], [integer], _) :-
throw(type(float, X)).
arithmetic_op(float_integer_part, [X], _, num(Z)) :-
Z is float_integer_part(X).
% Fractional part
arithmetic_op(float_fractional_part, [X], [integer], _) :-
throw(type(float, X)).
arithmetic_op(float_fractional_part, [X], _, num(Z)) :-
Z is float_fractional_part(X).
% Absolute value
arithmetic_op(abs, [X], _, num(Z)) :-
Z is abs(X).
% Remainder
arithmetic_op(rem, [X,_], [float,_], _) :-
throw(type(integer, X)).
arithmetic_op(rem, [_,Y], [_,float], _) :-
throw(type(integer, Y)).
arithmetic_op(rem, [_,0], _, _) :-
!,
throw(evaluation(zero_division)).
arithmetic_op(rem, [X,Y], _, num(Z)) :-
Z is rem(X,Y).
% Modulus
arithmetic_op(mod, [X,_], [float,_], _) :-
throw(type(integer, X)).
arithmetic_op(mod, [_,Y], [_,float], _) :-
throw(type(integer, Y)).
arithmetic_op(mod, [_,0], _, _) :-
!,
throw(evaluation(zero_division)).
arithmetic_op(mod, [X,Y], _, num(Z)) :-
Z is mod(X,Y).
% Minimum
arithmetic_op(min, [X,Y], _, num(Z)) :-
Z is min(X,Y).
% Maximum
arithmetic_op(max, [X,Y], _, num(Z)) :-
Z is max(X,Y).
% Bitwise operators
arithmetic_op('<<', [X,_], [float,_], _) :-
throw(type(integer, X)).
arithmetic_op('<<', [_,Y], [_,float], _) :-
throw(type(integer, Y)).
arithmetic_op('<<', [X,Y], _, num(Z)) :-
Z is X << Y.
arithmetic_op('>>', [X,_], [float,_], _) :-
throw(type(integer, X)).
arithmetic_op('>>', [_,Y], [_,float], _) :-
throw(type(integer, Y)).
arithmetic_op('>>', [X,Y], _, num(Z)) :-
Z is X >> Y.
arithmetic_op('\\/', [X,_], [float,_], _) :-
throw(type(integer, X)).
arithmetic_op('\\/', [_,Y], [_,float], _) :-
throw(type(integer, Y)).
arithmetic_op('\\/', [X,Y], _, num(Z)) :-
Z is '\\/'(X,Y).
arithmetic_op('/\\', [X,_], [float,_], _) :-
throw(type(integer, X)).
arithmetic_op('/\\', [_,Y], [_,float], _) :-
throw(type(integer, Y)).
arithmetic_op('/\\', [X,Y], _, num(Z)) :-
Z is '/\\'(X,Y).
arithmetic_op('\\', [X], [float], _) :-
throw(type(integer, X)).
arithmetic_op('\\', [X], _, num(Z)) :-
Z is '\\'(X).
arithmetic_op(Op, Args, _, _) :-
length(Args, Length),
throw(type(evaluable, Op/Length)). | 33.859649 | 81 | 0.638549 |
edbf903cc26eac21afb2568991f61283949fdd62 | 483 | t | Perl | t/local-SIG.t | miyagawa/CGI-Compile | 4c24fad84723e2f625a3c2f92e7724d41c1fb94d | [
"Artistic-1.0"
] | 3 | 2015-11-05T07:18:58.000Z | 2020-09-24T07:41:45.000Z | t/local-SIG.t | miyagawa/CGI-Compile | 4c24fad84723e2f625a3c2f92e7724d41c1fb94d | [
"Artistic-1.0"
] | 11 | 2015-03-01T14:00:41.000Z | 2020-08-03T18:52:02.000Z | t/local-SIG.t | miyagawa/CGI-Compile | 4c24fad84723e2f625a3c2f92e7724d41c1fb94d | [
"Artistic-1.0"
] | 4 | 2017-02-01T13:05:53.000Z | 2017-09-30T15:11:05.000Z | #!perl
use Capture::Tiny 'capture_stdout';
use CGI::Compile;
use POSIX qw(:signal_h);
use Test::More $^O eq 'MSWin32' ? (
skip_all => 'not supported on Win32')
: (
tests => 1
);
unless (defined sigprocmask(SIG_UNBLOCK, POSIX::SigSet->new(SIGQUIT))) {
die "Could not unblock SIGQUIT\n";
}
my $sub = CGI::Compile->compile(\<<'EOF');
$SIG{QUIT} = sub{print "QUIT\n"};
kill QUIT => $$;
print "END\n";
EOF
is capture_stdout { $sub->() }, "QUIT\nEND\n", 'caught signal';
| 20.125 | 72 | 0.6294 |
73f55dbfaead6468519974dd06348d43553a90c2 | 6,208 | pm | Perl | modules/EnsEMBL/Web/Component/CloudMultiSelector.pm | at7/backup-ensembl-webcode | 4c8c30f2ba9e0eebc3dd07e068fb6e02c388d086 | [
"Apache-2.0",
"MIT"
] | null | null | null | modules/EnsEMBL/Web/Component/CloudMultiSelector.pm | at7/backup-ensembl-webcode | 4c8c30f2ba9e0eebc3dd07e068fb6e02c388d086 | [
"Apache-2.0",
"MIT"
] | null | null | null | modules/EnsEMBL/Web/Component/CloudMultiSelector.pm | at7/backup-ensembl-webcode | 4c8c30f2ba9e0eebc3dd07e068fb6e02c388d086 | [
"Apache-2.0",
"MIT"
] | null | null | null | =head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
package EnsEMBL::Web::Component::CloudMultiSelector;
use strict;
use HTML::Entities qw(encode_entities);
use List::MoreUtils qw(uniq);
use base qw(EnsEMBL::Web::Component);
sub _init {
my $self = shift;
$self->{'panel_type'} = 'CloudMultiSelector';
$self->{'url_param'} = ''; # This MUST be implemented in the child _init function - it is the name of the parameter you want for the URL, eg if you want to add parameters s1, s2, s3..., $self->{'url_param'} = 's'
}
sub _content_li {
my ($self,$key,$content,$on,$partial) = @_;
my $class;
$class .= "off" unless $on;
$class .= "partial" if $partial;
$class .= "heading" if $on>1;
return qq(
<li class="$class" data-key="$key">$content</li>);
}
sub _sort_values {
my ($self,$values) = @_;
my $sort_func = $self->{'sort_func'};
$sort_func = sub { [ sort {$a cmp $b} @{$_[0]} ]; } unless $sort_func;
return $sort_func->($values);
}
sub content_ajax {
my $self = shift;
my $hub = $self->hub;
my %all = %{$self->{'all_options'}}; # Set in child content_ajax function - complete list of options in the form { URL param value => display label }
my %included = %{$self->{'included_options'}}; # Set in child content_ajax function - List of options currently set in URL in the form { url param value => order } where order is 1, 2, 3 etc.
my %partial = %{$self->{'partial_options'}||{}};
my @all_categories = @{$self->{'categories'}||[]};
my $url = $self->{'url'} || $hub->url({ function => undef, align => $hub->param('align') }, 1);
my $extra_inputs = join '', map sprintf('<input type="hidden" name="%s" value="%s" />', encode_entities($_), encode_entities($url->[1]{$_})), sort keys %{$url->[1]};
my $select_by = join '', map sprintf('<option value="%s">%s</option>', @$_), @{$self->{'select_by'} || []};
$select_by = qq{<div class="select_by"><h2>Select by type:</h2><select><option value="">----------------------------------------</option>$select_by</select></div>} if $select_by;
my @display;
foreach my $category ((@all_categories,undef)) {
# The data
my %items;
foreach my $key (keys %all) {
if(defined $category) {
my $my_category = ($self->{'category_map'}||{})->{$key};
$my_category ||= $self->{'default_category'};
if($my_category) {
next unless $my_category eq $category; # in a cat, is it ours?
} else {
next; # in a cat, we don't have one
}
} else {
# not in a cat
next if ($self->{'category_map'}||{})->{$key} || $self->{'default_category'};
}
my $cluster = ($self->{'cluster_map'}||{})->{$key} || '';
push @{$items{$cluster}||=[]},$key;
}
push @display,{
category => $category,
clusters => \%items
};
}
my $include_html;
foreach my $d (@display) {
my $include_list;
foreach my $cluster (sort { $a cmp $b } keys %{$d->{'clusters'}}) {
my $cluster_list;
my $heading = '';
if($cluster) {
$heading .= "<h4>$cluster:</h4>";
}
foreach my $key (@{$self->_sort_values($d->{'clusters'}{$cluster})}) {
$cluster_list .=
$self->_content_li($key,$all{$key},!!$included{$key},!!$partial{$key});
}
$include_list .= qq(<div>$heading<ul class="included">$cluster_list</ul></div>);
}
# The heading
my $include_title = $self->{'included_header'};
my $category_heading = ($self->{'category_titles'}||{})->{$d->{'category'}} || $d->{'category'};
$category_heading = '' unless defined $category_heading;
$include_title =~ s/\{category\}/$category_heading/g;
# Do it
next unless $include_list or $d->{'category'};
$include_html .= qq(<h2>$include_title</h2>$include_list);
}
my $content = sprintf('
<div class="content">
<form action="%s" method="get" class="hidden">%s</form>
<div class="cloud_filter">
<input type="text" name="cloud_filter" id="cloud_filter" tabindex="0" class="ftext" placeholder="type to filter options..."/>
<a href="#" class="cloud_filter_clear">clear filter</a>
<div class="cloud_all_none">
<span class="all">ALL ON</span>
<span class="none">ALL OFF</span>
</div>
</div>
<div class="cloud_multi_selector_list">
%s
</div>
<p class="invisible">.</p>
</div>',
$url->[0],
$extra_inputs,
$include_html,
);
my $partial = '';
if(%partial) {
$partial = qq(<div><span class="partial">PARTIAL</span></div>);
}
my $hint = qq(
<div class="cloud_flip_hint">
<div class="cloud_flip_hint_wrap">
<div class="info">
<h3>tip</h3>
<div class="error_pad">
<div>
<h1>click to flip</h1>
<span class="on">ON</span>
<span class="flip_icon"></span>
<span class="off">OFF</span>
</div>
$partial
</div>
</div>
</div>
</div>
);
my $param_mode = $self->{'param_mode'};
$param_mode ||= 'multi';
return $self->jsonify({
content => $content,
panelType => $self->{'panel_type'},
activeTab => $self->{'rel'},
wrapper => qq{<div class="modal_wrapper"><div class="panel"></div></div>},
nav => "$select_by$hint",
params => { urlParam => $self->{'url_param'}, paramMode => $param_mode, %{$self->{'extra_params'}||{}} },
});
}
1;
| 34.876404 | 215 | 0.579736 |
ed1f927d270dd5fbe8553df8fe7fd5ad839d552c | 1,736 | pl | Perl | perl/src/lib/exceptions.pl | nokibsarkar/sl4a | d3c17dca978cbeee545e12ea240a9dbf2a6999e9 | [
"Apache-2.0"
] | 2,293 | 2015-01-02T12:46:10.000Z | 2022-03-29T09:45:43.000Z | perl/src/lib/exceptions.pl | nokibsarkar/sl4a | d3c17dca978cbeee545e12ea240a9dbf2a6999e9 | [
"Apache-2.0"
] | 315 | 2015-05-31T11:55:46.000Z | 2022-01-12T08:36:37.000Z | perl/src/lib/exceptions.pl | nokibsarkar/sl4a | d3c17dca978cbeee545e12ea240a9dbf2a6999e9 | [
"Apache-2.0"
] | 1,033 | 2015-01-04T07:48:40.000Z | 2022-03-24T09:34:37.000Z | # exceptions.pl
# [email protected]
#
# This library is no longer being maintained, and is included for backward
# compatibility with Perl 4 programs which may require it.
#
# In particular, this should not be used as an example of modern Perl
# programming techniques.
#
#
# Here's a little code I use for exception handling. It's really just
# glorfied eval/die. The way to use use it is when you might otherwise
# exit, use &throw to raise an exception. The first enclosing &catch
# handler looks at the exception and decides whether it can catch this kind
# (catch takes a list of regexps to catch), and if so, it returns the one it
# caught. If it *can't* catch it, then it will reraise the exception
# for someone else to possibly see, or to die otherwise.
#
# I use oddly named variables in order to make darn sure I don't conflict
# with my caller. I also hide in my own package, and eval the code in his.
#
# The EXCEPTION: prefix is so you can tell whether it's a user-raised
# exception or a perl-raised one (eval error).
#
# --tom
#
# examples:
# if (&catch('/$user_input/', 'regexp', 'syntax error') {
# warn "oops try again";
# redo;
# }
#
# if ($error = &catch('&subroutine()')) { # catches anything
#
# &throw('bad input') if /^$/;
sub catch {
package exception;
local($__code__, @__exceptions__) = @_;
local($__package__) = caller;
local($__exception__);
eval "package $__package__; $__code__";
if ($__exception__ = &'thrown) {
for (@__exceptions__) {
return $__exception__ if /$__exception__/;
}
&'throw($__exception__);
}
}
sub throw {
local($exception) = @_;
die "EXCEPTION: $exception\n";
}
sub thrown {
$@ =~ /^(EXCEPTION: )+(.+)/ && $2;
}
1;
| 28 | 76 | 0.673963 |
ed96bfdd632919404ce5ecd198c21ae44d880a35 | 2,653 | pl | Perl | virtual-server/modify-custom.pl | diogovi/responsivebacula | 9a584500074a04a3147a30b1249249d6113afdab | [
"BSD-3-Clause"
] | null | null | null | virtual-server/modify-custom.pl | diogovi/responsivebacula | 9a584500074a04a3147a30b1249249d6113afdab | [
"BSD-3-Clause"
] | null | null | null | virtual-server/modify-custom.pl | diogovi/responsivebacula | 9a584500074a04a3147a30b1249249d6113afdab | [
"BSD-3-Clause"
] | 3 | 2016-09-23T03:42:35.000Z | 2020-11-06T11:01:34.000Z | #!/usr/bin/perl
=head1 modify-custom.pl
Modify custom fields for a virtual server
This program updates the value of one or more fields for a single virtual
server. The parameter C<--domain> must be given, and must be followed by the
domain name of the server to update. You must also supply the C<--set> parameter
at least once, which has to be followed by the code for the field to update
and the new value.
=cut
package virtual_server;
if (!$module_name) {
$main::no_acl_check++;
$ENV{'WEBMIN_CONFIG'} ||= "/etc/webmin";
$ENV{'WEBMIN_VAR'} ||= "/var/webmin";
if ($0 =~ /^(.*)\/[^\/]+$/) {
chdir($pwd = $1);
}
else {
chop($pwd = `pwd`);
}
$0 = "$pwd/modify-custom.pl";
require './virtual-server-lib.pl';
$< == 0 || die "modify-custom.pl must be run as root";
}
@OLDARGV = @ARGV;
use POSIX;
# Parse command-line args
$owner = 1;
while(@ARGV > 0) {
local $a = shift(@ARGV);
if ($a eq "--domain") {
$domain = shift(@ARGV);
}
elsif ($a eq "--set") {
$field = shift(@ARGV);
if ($field =~ /^(\S+)\s+(.*)$/) {
# Name and value in one parameter, such as from HTTP API
$field = $1;
$value = $2;
}
else {
$value = shift(@ARGV);
}
$field && defined($value) ||
&usage("--set must be followed by a field name and value");
push(@set, [ $field, $value ]);
}
elsif ($a eq "--allow-missing") {
$allow_missing = 1;
}
elsif ($a eq "--multiline") {
$multiline = 1;
}
else {
&usage("Unknown parameter $a");
}
}
$domain && @set || &usage("No domain or fields to set specified");
# Get the domain
$dom = &get_domain_by("dom", $domain);
$dom || usage("Virtual server $domain does not exist.");
$old = { %$dom };
# Run the before script
&set_domain_envs($old, "MODIFY_DOMAIN", $dom);
$merr = &making_changes();
&reset_domain_envs($old);
&usage($merr) if ($merr);
# Update all fields
@fields = &list_custom_fields();
foreach $f (@set) {
($field) = grep { $_->{'name'} eq $f->[0] ||
$_->{'desc'} eq $f->[0] } @fields;
$field || $allow_missing ||
&usage("No custom field named $f->[0] exists");
$dom->{'field_'.($field->{'name'} || $f->[0])} = $f->[1];
}
&save_domain($dom);
# Run the after script
&set_domain_envs($dom, "MODIFY_DOMAIN", undef, $old);
&made_changes();
&reset_domain_envs($dom);
&run_post_actions_silently();
&virtualmin_api_log(\@OLDARGV, $dom);
print "Custom field values in $domain successfully updated\n";
sub usage
{
print "$_[0]\n\n" if ($_[0]);
print "Sets the values of one or more custom fields for a virtual server\n";
print "\n";
print "virtualmin modify-custom --domain name\n";
print " <--set \"field value\">+\n";
exit(1);
}
| 24.564815 | 80 | 0.61176 |
ed7943675be03ca2572cf4858058abfaac8f4bf9 | 1,473 | pm | Perl | core/server/OpenXPKI/Server/Authentication/Anonymous.pm | ptomulik/openxpki | 0e9459fc4b24f5bf9652c3345ea4ed68fdbc4381 | [
"Apache-2.0"
] | null | null | null | core/server/OpenXPKI/Server/Authentication/Anonymous.pm | ptomulik/openxpki | 0e9459fc4b24f5bf9652c3345ea4ed68fdbc4381 | [
"Apache-2.0"
] | null | null | null | core/server/OpenXPKI/Server/Authentication/Anonymous.pm | ptomulik/openxpki | 0e9459fc4b24f5bf9652c3345ea4ed68fdbc4381 | [
"Apache-2.0"
] | null | null | null | ## OpenXPKI::Server::Authentication::Anonymous.pm
##
## Written 2006 by Michael Bell
## Updated to use new Service::Default semantics 2007 by Alexander Klink
## (C) Copyright 2006 by The OpenXPKI Project
use strict;
use warnings;
package OpenXPKI::Server::Authentication::Anonymous;
use OpenXPKI::Debug;
use OpenXPKI::Exception;
use OpenXPKI::Server::Context qw( CTX );
## constructor and destructor stuff
sub new {
my $that = shift;
my $class = ref($that) || $that;
my $self = {};
bless $self, $class;
my $path = shift;
##! 1: "start"
$self->{ROLE} = CTX('config')->get("$path.role") || 'Anonymous';
##! 2: "role: ".$self->{ROLE}
return $self;
}
sub login_step {
##! 1: 'start'
my $self = shift;
my $arg_ref = shift;
my $name = $arg_ref->{HANDLER};
my $msg = $arg_ref->{MESSAGE};
return (
'I18N_OPENXPKI_ANONYMOUS_USER',
$self->{ROLE},
{
SERVICE_MSG => 'SERVICE_READY',
},
);
}
1;
__END__
=head1 Name
OpenXPKI::Server::Authentication::Anonymous - anonymous authentication
implementation.
=head1 Description
This is the class which supports OpenXPKI with an anonymous
authentication method. The parameters are passed as a hash reference.
=head1 Functions
=head2 new
is the constructor. It requires the config prefix as single argument.
=head2 login_step
returns the tripe ('', $self->{ROLE}, and the service ready message)
| 19.64 | 72 | 0.645621 |
edc2f0f8e996720c112902cccae8fedba4b4d85f | 917 | t | Perl | YDR418W_YEL054C_estimatedTau_Noinitial/IGCgeo_50.0/sim_49/2NG.t | xji3/IGCCodonSimulation | 2daf1e566ec01eb6c59693d5758be7db91edc57b | [
"MIT"
] | null | null | null | YDR418W_YEL054C_estimatedTau_Noinitial/IGCgeo_50.0/sim_49/2NG.t | xji3/IGCCodonSimulation | 2daf1e566ec01eb6c59693d5758be7db91edc57b | [
"MIT"
] | null | null | null | YDR418W_YEL054C_estimatedTau_Noinitial/IGCgeo_50.0/sim_49/2NG.t | xji3/IGCCodonSimulation | 2daf1e566ec01eb6c59693d5758be7db91edc57b | [
"MIT"
] | null | null | null | 13
castelliiYDR418W
castelliiYEL054C 0.2413
mikataeYDR418W 0.4921 0.5140
mikataeYEL054C 0.4797 0.4941 0.4963
kluyveriYDR418W 0.4472 0.4575 0.6399 0.6436
paradoxusYDR418W 0.4732 0.4861 0.0685 0.4707 0.6064
paradoxusYEL054C 0.4947 0.5506 0.5120 0.3100 0.6331 0.4755
kudriavzeviiYDR418W 0.5622 0.5780 0.1870 0.5886 0.7014 0.1799 0.5850
kudriavzeviiYEL054C 0.4808 0.5085 0.4410 0.3372 0.6657 0.4154 0.4417 0.4214
bayanusYDR418W 0.4469 0.4577 0.1397 0.4948 0.5884 0.1329 0.5142 0.2134 0.4070
bayanusYEL054C 0.4132 0.4520 0.6029 0.2670 0.5641 0.5801 0.2629 0.6694 0.3475 0.5325
cerevisiaeYDR418W 0.4884 0.5014 0.1329 0.4681 0.6577 0.0749 0.4589 0.2073 0.4123 0.1857 0.5761
cerevisiaeYEL054C 0.4953 0.5522 0.4282 0.4575 0.6311 0.3817 0.2278 0.4968 0.4053 0.4329 0.3858 0.3374
| 61.133333 | 117 | 0.653217 |
ed64892dc801d1e59ef657e63c7a41d7e6a17702 | 2,440 | t | Perl | t/configure/012-verbose.t | kentfredric/parrot | ebd9b7e9a12f65beebdc61d7e428e11fffc6a2fc | [
"Artistic-2.0"
] | 1 | 2020-01-21T11:33:12.000Z | 2020-01-21T11:33:12.000Z | t/configure/012-verbose.t | kentfredric/parrot | ebd9b7e9a12f65beebdc61d7e428e11fffc6a2fc | [
"Artistic-2.0"
] | null | null | null | t/configure/012-verbose.t | kentfredric/parrot | ebd9b7e9a12f65beebdc61d7e428e11fffc6a2fc | [
"Artistic-2.0"
] | null | null | null | #! perl
# Copyright (C) 2007, Parrot Foundation.
# 012-verbose.t
use strict;
use warnings;
use Test::More tests => 12;
use Carp;
use lib qw( lib t/configure/testlib );
use Parrot::Configure;
use Parrot::Configure::Options qw( process_options );
use IO::CaptureOutput qw | capture |;
$| = 1;
is( $|, 1, "output autoflush is set" );
my ($args, $step_list_ref) = process_options(
{
argv => [q{--verbose}],
mode => q{configure},
}
);
ok( defined $args, "process_options returned successfully" );
my %args = %$args;
my $conf = Parrot::Configure->new;
ok( defined $conf, "Parrot::Configure->new() returned okay" );
my $step = q{init::beta};
my $description = 'Determining if your computer does beta';
$conf->add_steps($step);
my @confsteps = @{ $conf->steps };
isnt( scalar @confsteps, 0,
"Parrot::Configure object 'steps' key holds non-empty array reference" );
is( scalar @confsteps, 1, "Parrot::Configure object 'steps' key holds ref to 1-element array" );
my $nontaskcount = 0;
foreach my $k (@confsteps) {
$nontaskcount++ unless $k->isa("Parrot::Configure::Task");
}
is( $nontaskcount, 0, "Each step is a Parrot::Configure::Task object" );
is( $confsteps[0]->step, $step, "'step' element of Parrot::Configure::Task struct identified" );
ok( !ref( $confsteps[0]->object ),
"'object' element of Parrot::Configure::Task struct is not yet a ref" );
$conf->options->set(%args);
is( $conf->options->{c}->{debugging},
1, "command-line option '--debugging' has been stored in object" );
{
my $rv;
my $stdout;
capture ( sub {$rv = $conf->runsteps}, \$stdout );
ok( $rv, "runsteps successfully ran $step" );
like(
$stdout,
qr/$description\.\.\..*beta is verbose.*You've got beta.*done\./s, #'
"Got message expected upon running $step"
);
}
pass("Completed all tests in $0");
################### DOCUMENTATION ###################
=head1 NAME
012-verbose.t - test the C<--verbose> command-line argument
=head1 SYNOPSIS
% prove t/configure/012-verbose.t
=head1 DESCRIPTION
The files in this directory test functionality used by F<Configure.pl>.
The tests in this file examine what happens when you configure with
C<--verbose>.
=head1 AUTHOR
James E Keenan
=head1 SEE ALSO
Parrot::Configure, F<Configure.pl>.
=cut
# Local Variables:
# mode: cperl
# cperl-indent-level: 4
# fill-column: 100
# End:
# vim: expandtab shiftwidth=4:
| 24.897959 | 96 | 0.64959 |
eddaa4f9adebf67b18a38811003a00e32706db12 | 7,325 | pm | Perl | perl5/lib/perl5/Test2/Manual/Testing/Introduction.pm | jinnks/printevolve | 8c54f130000cd6ded290f5905bdc2093d9f264da | [
"Apache-2.0"
] | null | null | null | perl5/lib/perl5/Test2/Manual/Testing/Introduction.pm | jinnks/printevolve | 8c54f130000cd6ded290f5905bdc2093d9f264da | [
"Apache-2.0"
] | null | null | null | perl5/lib/perl5/Test2/Manual/Testing/Introduction.pm | jinnks/printevolve | 8c54f130000cd6ded290f5905bdc2093d9f264da | [
"Apache-2.0"
] | null | null | null | package Test2::Manual::Testing::Introduction;
use strict;
use warnings;
our $VERSION = '0.000138';
1;
__END__
=head1 NAME
Test2::Manual::Testing::Introduction - Introduction to testing with Test2.
=head1 DESCRIPTION
This tutorial is a beginners introduction to testing. This will take you
through writing a test file, making assertions, and running your test.
=head1 BOILERPLATE
=head2 THE TEST FILE
Test files typically are placed inside the C<t/> directory, and end with the
C<.t> file extension.
C<t/example.t>:
use Test2::V0;
# Assertions will go here
done_testing;
This is all the boilerplate you need.
=over 4
=item use Test2::V0;
This loads a collection of testing tools that will be described later in the
tutorial. This will also turn on C<strict> and C<warnings> for you.
=item done_testing;
This should always be at the end of your test files. This tells L<Test2> that
you are done making assertions. This is important as C<test2> will assume the
test did not complete successfully without this, or some other form of test
"plan".
=back
=head2 DIST CONFIG
You should always list bundles and tools directly. You should not simply list
L<Test2::Suite> and call it done, bundles and tools may be moved out of
L<Test2::Suite> to their own dists at any time.
=head3 Dist::Zilla
[Prereqs / TestRequires]
Test2::V0 = 0.000060
=head3 ExtUtils::MakeMaker
my %WriteMakefileArgs = (
...,
"TEST_REQUIRES" => {
"Test2::V0" => "0.000060"
},
...
);
=head3 Module::Install
test_requires 'Test2::V0' => '0.000060';
=head3 Module::Build
my $build = Module::Build->new(
...,
test_requires => {
"Test2::V0" => "0.000060",
},
...
);
=head1 MAKING ASSERTIONS
The most simple tool for making assertions is C<ok()>. C<ok()> lets you assert
that a condition is true.
ok($CONDITION, "Description of the condition");
Here is a complete C<t/example.t>:
use Test2::V0;
ok(1, "1 is true, so this will pass");
done_testing;
=head1 RUNNING THE TEST
Test files are simply scripts. Just like any other script you can run the test
directly with perl. Another option is to use a test "harness" which runs the
test for you, and provides extra information and checks the scripts exit value
for you.
=head2 RUN DIRECTLY
$ perl -Ilib t/example.t
Which should produce output like this:
# Seeded srand with seed '20161028' from local date.
ok 1 - 1 is true, so this will pass
1..1
If the test had failed (C<ok(0, ...)>) it would look like this:
# Seeded srand with seed '20161028' from local date.
not ok 1 - 0 is false, so this will fail
1..1
Test2 will also set the exit value of the script, a successful run will have an
exit value of 0, a failed run will have a non-zero exit value.
=head2 USING YATH
The C<yath> command line tool is provided by L<Test2::Harness> which you may
need to install yourself from cpan. C<yath> is the harness written specifically
for L<Test2>.
$ yath -Ilib t/example.t
This will produce output similar to this:
( PASSED ) job 1 t/example.t
================================================================================
Run ID: 1508027909
All tests were successful!
You can also request verbose output with the C<-v> flag:
$ yath -Ilib -v t/example.t
Which produces:
( LAUNCH ) job 1 example.t
( NOTE ) job 1 Seeded srand with seed '20171014' from local date.
[ PASS ] job 1 + 1 is true, so this will pass
[ PLAN ] job 1 Expected asserions: 1
( PASSED ) job 1 example.t
================================================================================
Run ID: 1508028002
All tests were successful!
=head2 USING PROVE
The C<prove> command line tool is provided by the L<Test::Harness> module which
comes with most versions of perl. L<Test::Harness> is dual-life, which means
you can also install the latest version from cpan.
$ prove -Ilib t/example.t
This will produce output like this:
example.t .. ok
All tests successful.
Files=1, Tests=1, 0 wallclock secs ( 0.01 usr 0.00 sys + 0.05 cusr 0.00 csys = 0.06 CPU)
Result: PASS
You can also request verbose output with the C<-v> flag:
$ prove -Ilib -v t/example.t
The verbose output looks like this:
example.t ..
# Seeded srand with seed '20161028' from local date.
ok 1 - 1 is true, so this will pass
1..1
ok
All tests successful.
Files=1, Tests=1, 0 wallclock secs ( 0.02 usr 0.00 sys + 0.06 cusr 0.00 csys = 0.08 CPU)
Result: PASS
=head1 THE "PLAN"
All tests need a "plan". The job of a plan is to make sure you ran all the
tests you expected. The plan prevents a passing result from a test that exits
before all the tests are run.
There are 2 primary ways to set the plan:
=over 4
=item done_testing()
The most common, and recommended way to set a plan is to add C<done_testing> at
the end of your test file. This will automatically calculate the plan for you
at the end of the test. If the test were to exit early then C<done_testing>
would not run and no plan would be found, forcing a failure.
=item plan($COUNT)
The C<plan()> function allows you to specify an exact number of assertions you
want to run. If you run too many or too few assertions then the plan will not
match and it will be counted as a failure. The primary problem with this way of
planning is that you need to add up the number of assertions, and adjust the
count whenever you update the test file.
C<plan()> must be used before all assertions, or after all assertions, it
cannot be done in the middle of making assertions.
=back
=head1 ADDITIONAL ASSERTION TOOLS
The L<Test2::V0> bundle provides a lot more than C<ok()>,
C<plan()>, and C<done_testing()>. The biggest tools to note are:
=over 4
=item is($a, $b, $description)
C<is()> allows you to compare 2 structures and insure they are identical. You
can use it for simple string comparisons, or even deep data structure
comparisons.
is("foo", "foo", "Both strings are identical");
is(["foo", 1], ["foo", 1], "Both arrays contain the same elements");
=item like($a, $b, $description)
C<like()> is similar to C<is()> except that it only checks items listed on the
right, it ignores any extra values found on the left.
like([1, 2, 3, 4], [1, 2, 3], "Passes, the extra element on the left is ignored");
You can also used regular expressions on the right hand side:
like("foo bar baz", qr/bar/, "The string matches the regex, this passes");
You can also nest the regexes:
like([1, 2, 'foo bar baz', 3], [1, 2, qr/bar/], "This passes");
=back
=head1 SEE ALSO
L<Test2::Manual> - Primary index of the manual.
=head1 SOURCE
The source code repository for Test2-Manual can be found at
F<https://github.com/Test-More/Test2-Suite/>.
=head1 MAINTAINERS
=over 4
=item Chad Granum E<lt>[email protected]<gt>
=back
=head1 AUTHORS
=over 4
=item Chad Granum E<lt>[email protected]<gt>
=back
=head1 COPYRIGHT
Copyright 2018 Chad Granum E<lt>[email protected]<gt>.
This program is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.
See F<http://dev.perl.org/licenses/>
=cut
| 24.914966 | 97 | 0.679863 |
ed85c3c96496f0d7e14d92a6779889de3d850b7e | 15,070 | pm | Perl | Mojoqq/perl/vendor/lib/Template/VMethods.pm | ghuan/Mojo-webqq-for-windows | ad44014da4578f99aa3efad0b55f0fc3bc3af322 | [
"Unlicense"
] | null | null | null | Mojoqq/perl/vendor/lib/Template/VMethods.pm | ghuan/Mojo-webqq-for-windows | ad44014da4578f99aa3efad0b55f0fc3bc3af322 | [
"Unlicense"
] | 3 | 2016-09-22T07:23:29.000Z | 2017-02-01T01:39:44.000Z | Mojoqq/perl/vendor/lib/Template/VMethods.pm | ghuan/Mojo-webqq-for-windows | ad44014da4578f99aa3efad0b55f0fc3bc3af322 | [
"Unlicense"
] | 10 | 2016-09-13T02:10:40.000Z | 2021-07-11T23:11:01.000Z | #============================================================= -*-Perl-*-
#
# Template::VMethods
#
# DESCRIPTION
# Module defining virtual methods for the Template Toolkit
#
# AUTHOR
# Andy Wardley <[email protected]>
#
# COPYRIGHT
# Copyright (C) 1996-2006 Andy Wardley. All Rights Reserved.
#
# This module is free software; you can redistribute it and/or
# modify it under the same terms as Perl itself.
#
# REVISION
# $Id$
#
#============================================================================
package Template::VMethods;
use strict;
use warnings;
use Scalar::Util qw( blessed looks_like_number );
use Template::Filters;
require Template::Stash;
our $VERSION = 2.16;
our $DEBUG = 0 unless defined $DEBUG;
our $PRIVATE = $Template::Stash::PRIVATE;
our $ROOT_VMETHODS = {
inc => \&root_inc,
dec => \&root_dec,
};
our $TEXT_VMETHODS = {
item => \&text_item,
list => \&text_list,
hash => \&text_hash,
length => \&text_length,
size => \&text_size,
defined => \&text_defined,
upper => \&text_upper,
lower => \&text_lower,
ucfirst => \&text_ucfirst,
lcfirst => \&text_lcfirst,
match => \&text_match,
search => \&text_search,
repeat => \&text_repeat,
replace => \&text_replace,
remove => \&text_remove,
split => \&text_split,
chunk => \&text_chunk,
substr => \&text_substr,
trim => \&text_trim,
collapse => \&text_collapse,
squote => \&text_squote,
dquote => \&text_dquote,
html => \&Template::Filters::html_filter,
xml => \&Template::Filters::xml_filter,
};
our $HASH_VMETHODS = {
item => \&hash_item,
hash => \&hash_hash,
size => \&hash_size,
each => \&hash_each,
keys => \&hash_keys,
values => \&hash_values,
items => \&hash_items,
pairs => \&hash_pairs,
list => \&hash_list,
exists => \&hash_exists,
defined => \&hash_defined,
delete => \&hash_delete,
import => \&hash_import,
sort => \&hash_sort,
nsort => \&hash_nsort,
};
our $LIST_VMETHODS = {
item => \&list_item,
list => \&list_list,
hash => \&list_hash,
push => \&list_push,
pop => \&list_pop,
unshift => \&list_unshift,
shift => \&list_shift,
max => \&list_max,
size => \&list_size,
defined => \&list_defined,
first => \&list_first,
last => \&list_last,
reverse => \&list_reverse,
grep => \&list_grep,
join => \&list_join,
sort => \&list_sort,
nsort => \&list_nsort,
unique => \&list_unique,
import => \&list_import,
merge => \&list_merge,
slice => \&list_slice,
splice => \&list_splice,
};
#========================================================================
# root virtual methods
#========================================================================
sub root_inc {
no warnings;
my $item = shift;
++$item;
}
sub root_dec {
no warnings;
my $item = shift;
--$item;
}
#========================================================================
# text virtual methods
#========================================================================
sub text_item {
$_[0];
}
sub text_list {
[ $_[0] ];
}
sub text_hash {
{ value => $_[0] };
}
sub text_length {
length $_[0];
}
sub text_size {
return 1;
}
sub text_defined {
return 1;
}
sub text_upper {
return uc $_[0];
}
sub text_lower {
return lc $_[0];
}
sub text_ucfirst {
return ucfirst $_[0];
}
sub text_lcfirst {
return lcfirst $_[0];
}
sub text_trim {
for ($_[0]) {
s/^\s+//;
s/\s+$//;
}
return $_[0];
}
sub text_collapse {
for ($_[0]) {
s/^\s+//;
s/\s+$//;
s/\s+/ /g
}
return $_[0];
}
sub text_match {
my ($str, $search, $global) = @_;
return $str unless defined $str and defined $search;
my @matches = $global ? ($str =~ /$search/g)
: ($str =~ /$search/);
return @matches ? \@matches : '';
}
sub text_search {
my ($str, $pattern) = @_;
return $str unless defined $str and defined $pattern;
return $str =~ /$pattern/;
}
sub text_repeat {
my ($str, $count) = @_;
$str = '' unless defined $str;
return '' unless $count;
$count ||= 1;
return $str x $count;
}
sub text_replace {
my ($text, $pattern, $replace, $global) = @_;
$text = '' unless defined $text;
$pattern = '' unless defined $pattern;
$replace = '' unless defined $replace;
$global = 1 unless defined $global;
if ($replace =~ /\$\d+/) {
# replacement string may contain backrefs
my $expand = sub {
my ($chunk, $start, $end) = @_;
$chunk =~ s{ \\(\\|\$) | \$ (\d+) }{
$1 ? $1
: ($2 > $#$start || $2 == 0 || !defined $start->[$2]) ? ''
: substr($text, $start->[$2], $end->[$2] - $start->[$2]);
}exg;
$chunk;
};
if ($global) {
$text =~ s{$pattern}{ &$expand($replace, [@-], [@+]) }eg;
}
else {
$text =~ s{$pattern}{ &$expand($replace, [@-], [@+]) }e;
}
}
else {
if ($global) {
$text =~ s/$pattern/$replace/g;
}
else {
$text =~ s/$pattern/$replace/;
}
}
return $text;
}
sub text_remove {
my ($str, $search) = @_;
return $str unless defined $str and defined $search;
$str =~ s/$search//g;
return $str;
}
sub text_split {
my ($str, $split, $limit) = @_;
$str = '' unless defined $str;
# For versions of Perl prior to 5.18 we have to be very careful about
# spelling out each possible combination of arguments because split()
# is very sensitive to them, for example C<split(' ', ...)> behaves
# differently to C<$space=' '; split($space, ...)>. Test 33 of
# vmethods/text.t depends on this behaviour.
if ($] < 5.018) {
if (defined $limit) {
return [ defined $split
? split($split, $str, $limit)
: split(' ', $str, $limit) ];
}
else {
return [ defined $split
? split($split, $str)
: split(' ', $str) ];
}
}
# split's behavior changed in Perl 5.18.0 making this:
# C<$space=' '; split($space, ...)>
# behave the same as this:
# C<split(' ', ...)>
# qr// behaves the same, so use that for user-defined split.
my $split_re;
if (defined $split) {
eval {
$split_re = qr/$split/;
};
}
$split_re = ' ' unless defined $split_re;
$limit ||= 0;
return [split($split_re, $str, $limit)];
}
sub text_chunk {
my ($string, $size) = @_;
my @list;
$size ||= 1;
if ($size < 0) {
# sexeger! It's faster to reverse the string, search
# it from the front and then reverse the output than to
# search it from the end, believe it nor not!
$string = reverse $string;
$size = -$size;
unshift(@list, scalar reverse $1)
while ($string =~ /((.{$size})|(.+))/g);
}
else {
push(@list, $1) while ($string =~ /((.{$size})|(.+))/g);
}
return \@list;
}
sub text_substr {
my ($text, $offset, $length, $replacement) = @_;
$offset ||= 0;
if(defined $length) {
if (defined $replacement) {
substr( $text, $offset, $length, $replacement );
return $text;
}
else {
return substr( $text, $offset, $length );
}
}
else {
return substr( $text, $offset );
}
}
sub text_squote {
my $text = shift;
for ($text) {
s/(['\\])/\\$1/g;
}
return $text;
}
sub text_dquote {
my $text = shift;
for ($text) {
s/(["\\])/\\$1/g;
s/\n/\\n/g;
}
return $text;
}
#========================================================================
# hash virtual methods
#========================================================================
sub hash_item {
my ($hash, $item) = @_;
$item = '' unless defined $item;
return if $PRIVATE && $item =~ /$PRIVATE/;
$hash->{ $item };
}
sub hash_hash {
$_[0];
}
sub hash_size {
scalar keys %{$_[0]};
}
sub hash_each {
# this will be changed in TT3 to do what hash_pairs() does
[ %{ $_[0] } ];
}
sub hash_keys {
[ keys %{ $_[0] } ];
}
sub hash_values {
[ values %{ $_[0] } ];
}
sub hash_items {
[ %{ $_[0] } ];
}
sub hash_pairs {
[ map {
{ key => $_ , value => $_[0]->{ $_ } }
}
sort keys %{ $_[0] }
];
}
sub hash_list {
my ($hash, $what) = @_;
$what ||= '';
return ($what eq 'keys') ? [ keys %$hash ]
: ($what eq 'values') ? [ values %$hash ]
: ($what eq 'each') ? [ %$hash ]
: # for now we do what pairs does but this will be changed
# in TT3 to return [ $hash ] by default
[ map { { key => $_ , value => $hash->{ $_ } } }
sort keys %$hash
];
}
sub hash_exists {
exists $_[0]->{ $_[1] };
}
sub hash_defined {
# return the item requested, or 1 if no argument
# to indicate that the hash itself is defined
my $hash = shift;
return @_ ? defined $hash->{ $_[0] } : 1;
}
sub hash_delete {
my $hash = shift;
delete $hash->{ $_ } for @_;
}
sub hash_import {
my ($hash, $imp) = @_;
$imp = {} unless ref $imp eq 'HASH';
@$hash{ keys %$imp } = values %$imp;
return '';
}
sub hash_sort {
my ($hash) = @_;
[ sort { lc $hash->{$a} cmp lc $hash->{$b} } (keys %$hash) ];
}
sub hash_nsort {
my ($hash) = @_;
[ sort { $hash->{$a} <=> $hash->{$b} } (keys %$hash) ];
}
#========================================================================
# list virtual methods
#========================================================================
sub list_item {
$_[0]->[ $_[1] || 0 ];
}
sub list_list {
$_[0];
}
sub list_hash {
my $list = shift;
if (@_) {
my $n = shift || 0;
return { map { ($n++, $_) } @$list };
}
no warnings;
return { @$list };
}
sub list_push {
my $list = shift;
push(@$list, @_);
return '';
}
sub list_pop {
my $list = shift;
pop(@$list);
}
sub list_unshift {
my $list = shift;
unshift(@$list, @_);
return '';
}
sub list_shift {
my $list = shift;
shift(@$list);
}
sub list_max {
no warnings;
my $list = shift;
$#$list;
}
sub list_size {
no warnings;
my $list = shift;
$#$list + 1;
}
sub list_defined {
# return the item requested, or 1 if no argument to
# indicate that the hash itself is defined
my $list = shift;
return 1 unless @_; # list.defined is always true
return unless looks_like_number $_[0]; # list.defined('bah') is always false
return defined $list->[$_[0]]; # list.defined(n)
}
sub list_first {
my $list = shift;
return $list->[0] unless @_;
return [ @$list[0..$_[0]-1] ];
}
sub list_last {
my $list = shift;
return $list->[-1] unless @_;
return [ @$list[-$_[0]..-1] ];
}
sub list_reverse {
my $list = shift;
[ reverse @$list ];
}
sub list_grep {
my ($list, $pattern) = @_;
$pattern ||= '';
return [ grep /$pattern/, @$list ];
}
sub list_join {
my ($list, $joint) = @_;
join(defined $joint ? $joint : ' ',
map { defined $_ ? $_ : '' } @$list);
}
sub _list_sort_make_key {
my ($item, $fields) = @_;
my @keys;
if (ref($item) eq 'HASH') {
@keys = map { $item->{ $_ } } @$fields;
}
elsif (blessed $item) {
@keys = map { $item->can($_) ? $item->$_() : $item } @$fields;
}
else {
@keys = $item;
}
# ugly hack to generate a single string using a delimiter that is
# unlikely (but not impossible) to be found in the wild.
return lc join('/*^UNLIKELY^*/', map { defined $_ ? $_ : '' } @keys);
}
sub list_sort {
my ($list, @fields) = @_;
return $list unless @$list > 1; # no need to sort 1 item lists
return [
@fields # Schwartzian Transform
? map { $_->[0] } # for case insensitivity
sort { $a->[1] cmp $b->[1] }
map { [ $_, _list_sort_make_key($_, \@fields) ] }
@$list
: map { $_->[0] }
sort { $a->[1] cmp $b->[1] }
map { [ $_, lc $_ ] }
@$list,
];
}
sub list_nsort {
my ($list, @fields) = @_;
return $list unless @$list > 1; # no need to sort 1 item lists
return [
@fields # Schwartzian Transform
? map { $_->[0] } # for case insensitivity
sort { $a->[1] <=> $b->[1] }
map { [ $_, _list_sort_make_key($_, \@fields) ] }
@$list
: map { $_->[0] }
sort { $a->[1] <=> $b->[1] }
map { [ $_, lc $_ ] }
@$list,
];
}
sub list_unique {
my %u;
[ grep { ++$u{$_} == 1 } @{$_[0]} ];
}
sub list_import {
my $list = shift;
push(@$list, grep defined, map ref eq 'ARRAY' ? @$_ : undef, @_);
return $list;
}
sub list_merge {
my $list = shift;
return [ @$list, grep defined, map ref eq 'ARRAY' ? @$_ : undef, @_ ];
}
sub list_slice {
my ($list, $from, $to) = @_;
$from ||= 0;
$to = $#$list unless defined $to;
$from += @$list if $from < 0;
$to += @$list if $to < 0;
return [ @$list[$from..$to] ];
}
sub list_splice {
my ($list, $offset, $length, @replace) = @_;
if (@replace) {
# @replace can contain a list of multiple replace items, or
# be a single reference to a list
@replace = @{ $replace[0] }
if @replace == 1 && ref $replace[0] eq 'ARRAY';
return [ splice @$list, $offset, $length, @replace ];
}
elsif (defined $length) {
return [ splice @$list, $offset, $length ];
}
elsif (defined $offset) {
return [ splice @$list, $offset ];
}
else {
return [ splice(@$list) ];
}
}
1;
__END__
=head1 NAME
Template::VMethods - Virtual methods for variables
=head1 DESCRIPTION
The C<Template::VMethods> module implements the virtual methods
that can be applied to variables.
Please see L<Template::Manual::VMethods> for further information.
=head1 AUTHOR
Andy Wardley E<lt>[email protected]<gt> L<http://wardley.org/>
=head1 COPYRIGHT
Copyright (C) 1996-2007 Andy Wardley. All Rights Reserved.
This module is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.
=head1 SEE ALSO
L<Template::Stash>, L<Template::Manual::VMethods>
=cut
# Local Variables:
# mode: perl
# perl-indent-level: 4
# indent-tabs-mode: nil
# End:
#
# vim: expandtab shiftwidth=4:
| 22.526158 | 81 | 0.480093 |
edc436434a0e464ed57874fe33e72049dfe1515c | 2,207 | pm | Perl | apps/tomcat/jmx/plugin.pm | xdrive05/centreon-plugins | 8227ba680fdfd2bb0d8a806ea61ec1611c2779dc | [
"Apache-2.0"
] | 1 | 2021-03-16T22:20:32.000Z | 2021-03-16T22:20:32.000Z | apps/tomcat/jmx/plugin.pm | xdrive05/centreon-plugins | 8227ba680fdfd2bb0d8a806ea61ec1611c2779dc | [
"Apache-2.0"
] | null | null | null | apps/tomcat/jmx/plugin.pm | xdrive05/centreon-plugins | 8227ba680fdfd2bb0d8a806ea61ec1611c2779dc | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2020 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package apps::tomcat::jmx::plugin;
use strict;
use warnings;
use base qw(centreon::plugins::script_custom);
sub new {
my ($class, %options) = @_;
my $self = $class->SUPER::new(package => __PACKAGE__, %options);
bless $self, $class;
$self->{version} = '0.1';
%{$self->{modes}} = (
'class-count' => 'centreon::common::jvm::mode::classcount',
'connector-usage' => 'apps::tomcat::jmx::mode::connectorusage',
'cpu-load' => 'centreon::common::jvm::mode::cpuload',
'datasource-usage' => 'apps::tomcat::jmx::mode::datasourceusage',
'fd-usage' => 'centreon::common::jvm::mode::fdusage',
'gc-usage' => 'centreon::common::jvm::mode::gcusage',
'list-datasources' => 'apps::tomcat::jmx::mode::listdatasources',
'list-webapps' => 'apps::tomcat::jmx::mode::listwebapps',
'load-average' => 'centreon::common::jvm::mode::loadaverage',
'memory' => 'centreon::common::jvm::mode::memory',
'memory-detailed' => 'centreon::common::jvm::mode::memorydetailed',
'threads' => 'centreon::common::jvm::mode::threads',
'webapps-sessions' => 'apps::tomcat::jmx::mode::webappssessions',
);
$self->{custom_modes}{jolokia} = 'centreon::common::protocols::jmx::custom::jolokia';
return $self;
}
1;
__END__
=head1 PLUGIN DESCRIPTION
Check Tomcat in JMX. Need Jolokia agent.
=cut
| 35.596774 | 89 | 0.649298 |
edcd0017c3049c35eaf5cf20bff55506da5b4928 | 1,090 | pl | Perl | ecce_source/check_instance_of/check_instance_of.karp.pl | leuschel/ecce | f7f834bd219759cd7e8b3709801ffe26082c766d | [
"Apache-2.0"
] | 10 | 2015-10-16T08:23:29.000Z | 2020-08-10T18:17:26.000Z | ecce_source/check_instance_of/check_instance_of.karp.pl | leuschel/ecce | f7f834bd219759cd7e8b3709801ffe26082c766d | [
"Apache-2.0"
] | null | null | null | ecce_source/check_instance_of/check_instance_of.karp.pl | leuschel/ecce | f7f834bd219759cd7e8b3709801ffe26082c766d | [
"Apache-2.0"
] | 3 | 2015-10-18T11:11:44.000Z | 2019-02-13T14:18:49.000Z | :- module('check_instance_of.karp',['check_instance_of.karp:get_instance_of'/4]).
% :- dynamic get_instance_of/4.
:- set_prolog_flag(single_var_warnings,off).
:- use_module(library(lists)).
:- use_module('../bimtools').
:- use_module('../calc_chtree').
:- use_module('../global_tree').
:- use_package( .('../ecce_no_rt2') ).
/* must be variant of something with same chtree */
'check_instance_of.karp:get_instance_of'(GoalID,Goal,Chtree,MoreGeneralID) :-
node_is_an_abstraction(GoalID),
copy(Goal,CGoal),
gt_node_goal(MoreGeneralID,CGoal), /* lookup a potential match */
ancestor_node(GoalID,MoreGeneralID),
GoalID \== MoreGeneralID,
gt_node_pe_status(MoreGeneralID,PEStatus),
PEStatus \== no,
gt_node_goal(MoreGeneralID,MoreGeneralGoal),
variant_of(Goal,MoreGeneralGoal),
gt_node_chtree(MoreGeneralID,Chtree). /* same chtree */
ancestor_node(GoalID,ParID2) :-
gt_node_descends_from(GoalID,ParID2,LeafLocalID),
not(LeafLocalID = chpos(abstracted,_)).
ancestor_node(GoalID,ParID3) :-
gt_node_descends_from(GoalID,ParID2,_LeafLocalID),
ancestor_node(ParID2,ParID3).
| 27.948718 | 81 | 0.759633 |
ed68d476dc515be9bd27a2d8a5e7049ada7b3a87 | 3,249 | t | Perl | t/sanity.t | kaladaOpuiyo/lua-resty-global-throttle | e3254211843b68b14248be8746efcf1cb0b2bf71 | [
"MIT"
] | 12 | 2020-05-16T04:07:27.000Z | 2022-01-21T16:03:56.000Z | t/sanity.t | kaladaOpuiyo/lua-resty-global-throttle | e3254211843b68b14248be8746efcf1cb0b2bf71 | [
"MIT"
] | 8 | 2020-12-18T03:51:22.000Z | 2021-03-01T15:39:40.000Z | t/sanity.t | kaladaOpuiyo/lua-resty-global-throttle | e3254211843b68b14248be8746efcf1cb0b2bf71 | [
"MIT"
] | 3 | 2020-06-14T14:01:51.000Z | 2021-04-01T14:14:02.000Z | use Test::Nginx::Socket::Lua 'no_plan';
use Cwd qw(cwd);
my $pwd = cwd();
our $HttpConfig = qq(
lua_package_path "$pwd/t/lib/?.lua;$pwd/lib/?.lua;;";
lua_shared_dict counters 1M;
);
run_tests();
__DATA__
=== TEST 1: all cases
--- http_config eval: $::HttpConfig
--- config
location /protected {
content_by_lua_block {
local global_throttle = require "resty.global_throttle"
local client_throttle = global_throttle.new(10, 0.2, { provider = "shared_dict", name = "counters" })
local args, err = ngx.req.get_uri_args()
if err then
ngx.status = 500
ngx.say(err)
return ngx.exit(ngx.HTTP_OK)
end
local key = args.api_client_id
local should_throttle, err = client_throttle:process(key)
if should_throttle then
ngx.status = 429
ngx.say("throttled")
return ngx.exit(ngx.HTTP_OK)
end
ngx.exit(ngx.HTTP_OK)
}
}
location = /t {
content_by_lua_block {
local res
ngx.log(ngx.NOTICE, "Expect spike to be allowed in the beginning.")
for i=1,10 do
res = ngx.location.capture("/protected?api_client_id=2")
if res.status ~= 200 then
ngx.status = res.status
return ngx.exit(ngx.HTTP_OK)
end
end
ngx.log(ngx.NOTICE, "Expect no throttling since requests will be sent under the configured rate.")
ngx.sleep(0.19) -- we have to wait here because the first 10 requests were sent too fast
for i=1,12 do
-- ensure we are sending requests under the configured rate
local jitter = math.random(10) / 10000
local delay = 0.2 / 12 + jitter
ngx.sleep(delay)
res = ngx.location.capture("/protected?api_client_id=2")
if res.status ~= 200 then
ngx.status = res.status
return ngx.exit(ngx.HTTP_OK)
end
end
ngx.log(ngx.NOTICE, "Expect spike to be throttled because the algorithm remembers previous rate and smothen the load.")
ngx.sleep(0.15)
local throttled = false
for i=1,10 do
res = ngx.location.capture("/protected?api_client_id=2")
if res.status == 429 then
throttled = true
goto continue1
end
end
::continue1::
if not throttled then
ngx.status = 500
return ngx.exit(ngx.HTTP_OK)
end
ngx.log(ngx.NOTICE, "Expect requests to be throttled because they will be sent faster.")
ngx.sleep(0.15)
throttled = false
for i=1,15 do
res = ngx.location.capture("/protected?api_client_id=2")
if res.status == 429 then
throttled = true
goto continue2
end
-- ensure we are sending requests over the configured rate
local delay = 0.15 / 15
ngx.sleep(delay)
end
::continue2::
if not throttled then
ngx.status = 500
return ngx.exit(ngx.HTTP_OK)
end
ngx.log(ngx.NOTICE, "Expect spike when using different key because this will be the first spike.")
for i=1,10 do
res = ngx.location.capture("/protected?api_client_id=1")
if res.status ~= 200 then
ngx.status = res.status
return ngx.exit(ngx.HTTP_OK)
end
end
ngx.status = res.status
ngx.print(res.body)
ngx.exit(ngx.HTTP_OK)
}
}
--- request
GET /t
--- response_body
--- error_code: 200
| 26.201613 | 123 | 0.640505 |
ed961b285e0a527139ceb5613d2ca03265c9753f | 1,025 | pm | Perl | lib/Google/Ads/GoogleAds/V8/Resources/GenderView.pm | googleads/google-ads-perl | 3ee6c09e11330fea1e6a0c9ee9f837e5e36d8177 | [
"Apache-2.0"
] | 19 | 2019-06-21T00:43:57.000Z | 2022-03-29T14:23:01.000Z | lib/Google/Ads/GoogleAds/V8/Resources/GenderView.pm | googleads/google-ads-perl | 3ee6c09e11330fea1e6a0c9ee9f837e5e36d8177 | [
"Apache-2.0"
] | 16 | 2020-03-04T07:44:53.000Z | 2021-12-15T23:06:23.000Z | lib/Google/Ads/GoogleAds/V8/Resources/GenderView.pm | googleads/google-ads-perl | 3ee6c09e11330fea1e6a0c9ee9f837e5e36d8177 | [
"Apache-2.0"
] | 9 | 2020-02-28T03:00:48.000Z | 2021-11-10T14:23:02.000Z | # Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V8::Resources::GenderView;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);
use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;
sub new {
my ($class, $args) = @_;
my $self = {resourceName => $args->{resourceName}};
# Delete the unassigned fields in this object for a more concise JSON payload
remove_unassigned_fields($self, $args);
bless $self, $class;
return $self;
}
1;
| 29.285714 | 79 | 0.73561 |
ed805c2db51cc409f32a8da44a5ece5be724522a | 13,035 | pm | Perl | lib/MongoDB/GridFS.pm | kainwinterheart/mongo-perl-driver | 5fb7e0100e2ababe0ede10d4829ad9ade773fb13 | [
"Apache-2.0"
] | null | null | null | lib/MongoDB/GridFS.pm | kainwinterheart/mongo-perl-driver | 5fb7e0100e2ababe0ede10d4829ad9ade773fb13 | [
"Apache-2.0"
] | null | null | null | lib/MongoDB/GridFS.pm | kainwinterheart/mongo-perl-driver | 5fb7e0100e2ababe0ede10d4829ad9ade773fb13 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2009-2013 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package MongoDB::GridFS;
# ABSTRACT: A file storage utility
use version;
our $VERSION = 'v0.999.999.2'; # TRIAL
use MongoDB::GridFS::File;
use DateTime 0.78; # drops dependency on bug-prone Math::Round
use Digest::MD5;
use Moose;
use MongoDB::Error;
use MongoDB::WriteConcern;
use MongoDB::_Types -types;
use Types::Standard -types;
use namespace::clean -except => 'meta';
=attr chunk_size
The number of bytes per chunk. Defaults to 261120 (255kb).
=cut
$MongoDB::GridFS::chunk_size = 261120;
has _database => (
is => 'ro',
isa => InstanceOf['MongoDB::Database'],
required => 1,
);
=attr read_preference
A L<MongoDB::ReadPreference> object. It may be initialized with a string
corresponding to one of the valid read preference modes or a hash reference
that will be coerced into a new MongoDB::ReadPreference object.
By default it will be inherited from a L<MongoDB::Database> object.
=cut
has read_preference => (
is => 'ro',
isa => ReadPreference,
required => 1,
coerce => 1,
);
=attr write_concern
A L<MongoDB::WriteConcern> object. It may be initialized with a hash
reference that will be coerced into a new MongoDB::WriteConcern object.
By default it will be inherited from a L<MongoDB::Database> object.
=cut
has write_concern => (
is => 'ro',
isa => WriteConcern,
required => 1,
coerce => 1,
);
=attr max_time_ms
Specifies the default maximum amount of time in milliseconds that the
server should use for working on a query.
B<Note>: this will only be used for server versions 2.6 or greater, as that
was when the C<$maxTimeMS> meta-operator was introduced.
=cut
has max_time_ms => (
is => 'ro',
isa => NonNegNum,
required => 1,
);
=attr bson_codec
An object that provides the C<encode_one> and C<decode_one> methods, such
as from L<MongoDB::BSON>. It may be initialized with a hash reference that
will be coerced into a new MongoDB::BSON object. By default it will be
inherited from a L<MongoDB::Database> object.
=cut
has bson_codec => (
is => 'ro',
isa => BSONCodec,
coerce => 1,
required => 1,
);
=attr prefix
The prefix used for the collections. Defaults to "fs".
=cut
has prefix => (
is => 'ro',
isa => Str,
default => 'fs'
);
has files => (
is => 'ro',
isa => InstanceOf['MongoDB::Collection'],
lazy_build => 1
);
sub _build_files {
my $self = shift;
my $coll = $self->_database->get_collection(
$self->prefix . '.files',
{
read_preference => $self->read_preference,
write_concern => $self->write_concern,
max_time_ms => $self->max_time_ms,
bson_codec => $self->bson_codec,
}
);
return $coll;
}
has chunks => (
is => 'ro',
isa => InstanceOf['MongoDB::Collection'],
lazy_build => 1
);
sub _build_chunks {
my $self = shift;
my $coll = $self->_database->get_collection(
$self->prefix . '.chunks',
{
read_preference => $self->read_preference,
write_concern => $self->write_concern,
max_time_ms => $self->max_time_ms,
}
);
return $coll;
}
# This checks if the required indexes for GridFS exist in for the current database.
# If they are not found, they will be created.
sub BUILD {
my ($self) = @_;
$self->_ensure_indexes();
return;
}
sub _ensure_indexes {
my ($self) = @_;
# ensure the necessary index is present (this may be first usage)
$self->files->ensure_index(Tie::IxHash->new(filename => 1), {"safe" => 1});
$self->chunks->ensure_index(Tie::IxHash->new(files_id => 1, n => 1), {"safe" => 1, "unique" => 1});
}
=method get
$file = $grid->get($id);
Get a file from GridFS based on its _id. Returns a L<MongoDB::GridFS::File>.
To retrieve a file based on metadata like C<filename>, use the L</find_one>
method instead.
=cut
sub get {
my ($self, $id) = @_;
return $self->find_one({_id => $id});
}
=method put
$id = $grid->put($fh, $metadata);
$id = $grid->put($fh, {filename => "pic.jpg"});
Inserts a file into GridFS, adding a L<MongoDB::OID> as the _id field if the
field is not already defined. This is a wrapper for C<MongoDB::GridFS::insert>,
see that method below for more information.
Returns the _id field.
=cut
sub put {
my ($self, $fh, $metadata) = @_;
return $self->insert($fh, $metadata, {safe => 1});
}
=method delete
$grid->delete($id)
Removes the file with the given _id. Will die if the remove is unsuccessful.
Does not return anything on success.
=cut
sub delete {
my ($self, $id) = @_;
$self->remove({_id => $id}, {safe => 1});
}
=method find_one
$file = $grid->find_one({"filename" => "foo.txt"});
$file = $grid->find_one($criteria, $fields);
Returns a matching MongoDB::GridFS::File or undef.
=cut
sub find_one {
my ($self, $criteria, $fields) = @_;
$criteria ||= {};
my $file = $self->files->find_one($criteria, $fields);
return undef unless $file;
return MongoDB::GridFS::File->new({_grid => $self,info => $file});
}
=method remove
$grid->remove({"filename" => "foo.txt"});
$grid->remove({"filename" => "foo.txt"}, $options);
Cleanly removes files from the database. C<$options> is a hash of options for
the remove.
A hashref of options may be provided with the following keys:
=for :list
* C<just_one>: If true, only one file matching the criteria will be removed.
* C<safe>: (DEPRECATED) If true, each remove will be checked for success and
die on failure. Set the L</write_concern> attribute instead.
This method doesn't return anything.
=cut
sub remove {
my ( $self, $criteria, $options ) = @_;
$options ||= {};
my $chunks =
exists $options->{safe}
? $self->chunks->clone( write_concern => $self->_dynamic_write_concern($options) )
: $self->chunks;
my $files =
exists $options->{safe}
? $self->files->clone( write_concern => $self->_dynamic_write_concern($options) )
: $self->files;
if ( $options->{just_one} ) {
my $meta = $files->find_one($criteria);
$chunks->delete_many( { "files_id" => $meta->{'_id'} } );
$files->delete_one( { "_id" => $meta->{'_id'} } );
}
else {
my $cursor = $files->find($criteria);
while ( my $meta = $cursor->next ) {
$chunks->delete_many( { "files_id" => $meta->{'_id'} } );
}
$files->delete_many($criteria);
}
return;
}
=method insert
$id = $gridfs->insert($fh);
$id = $gridfs->insert($fh, $metadata);
$id = $gridfs->insert($fh, $metadata, $options);
$id = $gridfs->insert($fh, {"content-type" => "text/html"});
Reads from a file handle into the database. Saves the file with the given
metadata. The file handle must be readable.
A hashref of options may be provided with the following keys:
=for :list
* C<safe>: (DEPRECATED) Will do safe inserts and check the MD5 hash calculated
by the database against an MD5 hash calculated by the local filesystem. If
the two hashes do not match, then the chunks already inserted will be removed
and the program will die. Set the L</write_concern> attribute instead.
Because C<MongoDB::GridFS::insert> takes a file handle, it can be used to insert
very long strings into the database (as well as files). C<$fh> must be a
FileHandle (not just the native file handle type), so you can insert a string
with:
# open the string like a file
open($basic_fh, '<', \$very_long_string);
# turn the file handle into a FileHandle
$fh = FileHandle->new;
$fh->fdopen($basic_fh, 'r');
$gridfs->insert($fh);
=cut
sub insert {
my ($self, $fh, $metadata, $options) = @_;
$options ||= {};
MongoDB::UsageError->throw("not a file handle") unless $fh;
$metadata = {} unless $metadata && ref $metadata eq 'HASH';
my $chunks =
exists $options->{safe}
? $self->chunks->clone( write_concern => $self->_dynamic_write_concern($options) )
: $self->chunks;
my $files =
exists $options->{safe}
? $self->files->clone( write_concern => $self->_dynamic_write_concern($options) )
: $self->files;
my $start_pos = $fh->getpos();
my $id;
if (exists $metadata->{"_id"}) {
$id = $metadata->{"_id"};
}
else {
$id = MongoDB::OID->new;
}
my $n = 0;
my $length = 0;
while ((my $len = $fh->read(my $data, $MongoDB::GridFS::chunk_size)) != 0) {
$chunks->insert_one({"files_id" => $id,
"n" => $n,
"data" => bless(\$data)});
$n++;
$length += $len;
}
$fh->setpos($start_pos);
my %copy = %{$metadata};
# compare the md5 hashes
if ($files->write_concern->is_acknowledged) {
# get an md5 hash for the file. set the retry flag to 'true' incase the
# database, collection, or indexes are missing. That way we can recreate them
# retry the md5 calc.
my $result = $self->_database->run_command([filemd5 => $id, root => $self->prefix]);
$copy{"md5"} = $result->{"md5"};
my $md5 = Digest::MD5->new;
$md5->addfile($fh);
$fh->setpos($start_pos);
my $digest = $md5->hexdigest;
if ($digest ne $result->{md5}) {
# cleanup and die
$chunks->delete_many({files_id => $id});
MongoDB::GridFSError->throw(
"md5 hashes don't match: database got $result->{md5}, fs got $digest" );
}
}
$copy{"_id"} = $id;
$copy{"chunkSize"} = $MongoDB::GridFS::chunk_size;
$copy{"uploadDate"} = DateTime->now;
$copy{"length"} = $length;
return $files->insert_one(\%copy)->inserted_id;
}
=method drop
$grid->drop;
Removes all files' metadata and contents.
=cut
sub drop {
my ($self) = @_;
$self->files->drop;
$self->chunks->drop;
$self->_ensure_indexes;
}
=head2 all
@files = $grid->all;
Returns a list of the files in the database as L<MongoDB::GridFS::File>
objects.
=cut
sub all {
my ($self) = @_;
my @ret;
my $cursor = $self->files->find({});
while (my $meta = $cursor->next) {
push @ret, MongoDB::GridFS::File->new(
_grid => $self,
info => $meta);
}
return @ret;
}
#--------------------------------------------------------------------------#
# private methods
#--------------------------------------------------------------------------#
sub _dynamic_write_concern {
my ( $self, $opts ) = @_;
my $wc = $self->write_concern;
if ( !exists $opts->{safe} ) {
return $wc;
}
elsif ( $opts->{safe} ) {
return $wc->is_acknowledged ? $wc : MongoDB::WriteConcern->new( w => 1 );
}
else {
return MongoDB::WriteConcern->new( w => 0 );
}
}
__PACKAGE__->meta->make_immutable;
1;
=head1 SYNOPSIS
my $grid = $database->get_gridfs;
my $fh = IO::File->new("myfile", "r");
$grid->insert($fh, {"filename" => "mydbfile"});
=head1 DESCRIPTION
This class models a GridFS file store in a MongoDB database and provides an API
for interacting with it.
Generally, you never construct one of these directly with C<new>. Instead, you
call C<get_gridfs> on a L<MongoDB::Database> object.
=head1 USAGE
=head2 API
There are two interfaces for GridFS: a file-system/collection-like interface
(insert, remove, drop, find_one) and a more general interface
(get, put, delete). Their functionality is the almost identical (get, put and
delete are always safe ops, insert, remove, and find_one are optionally safe),
using one over the other is a matter of preference.
=head2 Error handling
Unless otherwise explictly documented, all methods throw exceptions if
an error occurs. The error types are documented in L<MongoDB::Error>.
To catch and handle errors, the L<Try::Tiny> and L<Safe::Isa> modules
are recommended:
use Try::Tiny;
use Safe::Isa; # provides $_isa
try {
$grid->get( $id )
}
catch {
if ( $_->$_isa("MongoDB::TimeoutError" ) {
...
}
else {
...
}
};
To retry failures automatically, consider using L<Try::Tiny::Retry>.
=head1 SEE ALSO
Core documentation on GridFS: L<http://dochub.mongodb.org/core/gridfs>.
=cut
| 25.261628 | 103 | 0.61074 |
ed6482e7491afab7cc3d15c433a511c5df708a02 | 12,118 | pm | Perl | modules/Bio/EnsEMBL/Variation/Pipeline/ReleaseDataDumps/JoinDump.pm | fergalmartin/ensembl-variation | 858de3ee083fd066bc0b8a78e8a449176dd51bce | [
"Apache-2.0"
] | null | null | null | modules/Bio/EnsEMBL/Variation/Pipeline/ReleaseDataDumps/JoinDump.pm | fergalmartin/ensembl-variation | 858de3ee083fd066bc0b8a78e8a449176dd51bce | [
"Apache-2.0"
] | 1 | 2020-04-20T12:11:56.000Z | 2020-04-20T12:11:56.000Z | modules/Bio/EnsEMBL/Variation/Pipeline/ReleaseDataDumps/JoinDump.pm | dglemos/ensembl-variation | 7cd20531835b45b1842476606b4fd0856e3843e0 | [
"Apache-2.0"
] | null | null | null | =head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
package Bio::EnsEMBL::Variation::Pipeline::ReleaseDataDumps::JoinDump;
use strict;
use warnings;
use FileHandle;
use base ('Bio::EnsEMBL::Variation::Pipeline::ReleaseDataDumps::BaseDataDumpsProcess');
=begin
Different file joins:
- join slice pieces to seq_region
- join seq_regions to complete dump file
- adjust header lines in dump file:
- GVF: get correct sequence_regions
=end
=cut
sub run {
my $self = shift;
my $species = $self->param('species');
my $file_type = $self->param('file_type');
my $config = $self->param('config');
my $division = $self->param('species_division');
my $pipeline_dir = $self->data_dir($species);
my $working_dir = "$pipeline_dir/$file_type/$species/";
my $mode = $self->param('mode');
if ($mode eq 'join_slice_split') {
my $files = $self->get_slice_split_files($working_dir, $file_type);
$self->join_split_slice_files($working_dir, $files);
$self->dataflow_output_id({}, 2);
$self->dataflow_output_id({}, 1);
} elsif ($mode eq 'final_join') {
$self->final_join;
} elsif ($mode eq 'no_join') {
$self->warning('No final join required');
} else {
die "Unknown mode: $mode in JoinDump";
}
}
sub final_join {
my $self = shift;
my $file_type = $self->param('file_type');
if ($file_type eq 'gvf') {
$self->final_join_gvf();
} else {
$self->final_join_vcf();
}
}
sub final_join_gvf {
my $self = shift;
my $dir = $self->param('dir');
my $file_name = $self->param('file_name');
my $dump_type = $self->param('dump_type');
my @input_ids = sort @{$self->param('input_ids')};
my $tmp_dir = $self->param('tmp_dir');
my $covered_seq_region_ids = $self->get_covered_seq_regions;
my $species = $self->param('species');
my $cdba = $self->get_species_adaptor($species, 'core');
my $sa = $cdba->get_SliceAdaptor;
my @sequence_regions = {};
foreach my $seq_region_id (keys %$covered_seq_region_ids) {
my $slice = $sa->fetch_by_seq_region_id($seq_region_id);
push @sequence_regions, {
'name' => $slice->seq_region_name,
'start' => $slice->start,
'end' => $slice->end,
};
}
my $first_file_id = $input_ids[0];
my $fh_join = FileHandle->new("$dir/$file_name.gvf", 'w');
`gunzip $dir/$dump_type-$first_file_id.gvf.gz`;
my $fh = FileHandle->new("$dir/$dump_type-$first_file_id.gvf", 'r');
# print the header first
while (<$fh>) {
chomp;
my $line = $_;
if ($line =~ m/^#/) {
next if ($line =~ m/^##sequence-region/);
print $fh_join $line, "\n";
}
}
$fh->close();
`gzip $dir/$dump_type-$first_file_id.gvf`;
foreach my $sequence_region (@sequence_regions) {
if ($sequence_region->{name} && $sequence_region->{start} && $sequence_region->{end}) {
print $fh_join join(' ', '##sequence-region', $sequence_region->{name}, $sequence_region->{start}, $sequence_region->{end}), "\n";
}
}
my $id_count = 1;
foreach my $file_id (@input_ids) {
`gunzip $dir/$dump_type-$file_id.gvf.gz`;
my $fh = FileHandle->new("$dir/$dump_type-$file_id.gvf", 'r');
while (<$fh>) {
chomp;
my $line = $_;
next if ($line =~ m/^#/);
my $gvf_line = get_gvf_line($line, $id_count);
print $fh_join $gvf_line, "\n";
$id_count++;
}
$fh->close();
`gzip $dir/$dump_type-$file_id.gvf`;
`mv $dir/$dump_type-$file_id.gvf.gz $tmp_dir`;
}
$fh_join->close();
}
sub final_join_vcf {
my $self = shift;
my $dir = $self->param('dir');
my $file_name = $self->param('file_name');
my $dump_type = $self->param('dump_type');
my @input_ids = sort @{$self->param('input_ids')};
my $first_file_id = shift @input_ids;
my $joined_fh = FileHandle->new("$dir/$file_name.vcf", 'w');
`gunzip $dir/$dump_type-$first_file_id.vcf.gz`;
my $fh = FileHandle->new("$dir/$dump_type-$first_file_id.vcf", 'r');
while (<$fh>) {
chomp;
print $joined_fh $_, "\n";
}
$fh->close();
`rm $dir/$dump_type-$first_file_id.vcf`;
foreach my $file_id (@input_ids) {
`gunzip $dir/$dump_type-$file_id.vcf.gz`;
my $fh = FileHandle->new("$dir/$dump_type-$file_id.vcf", 'r');
while (<$fh>) {
chomp;
my $line = $_;
next if ($line =~ m/^#/);
print $joined_fh $line, "\n";
}
$fh->close();
`rm $dir/$dump_type-$file_id.vcf`;
}
$joined_fh->close();
my $vcf_file = "$dir/$file_name.vcf";
my $cmd = "vcf-sort < $vcf_file | bgzip > $vcf_file.gz";
$self->run_cmd($cmd);
`rm $vcf_file`;
}
sub get_slice_split_files {
my ($self, $working_dir, $file_type) = @_;
my $files = {};
my ($split_slice_range, $file_name);
opendir(my $dh, $working_dir) or die $!;
my @dir_content = readdir($dh);
closedir($dh);
foreach my $file (@dir_content) {
next if ($file =~ m/^\./);
if ($file =~ m/\.$file_type/) {
$file =~ s/\.$file_type//g;
my @file_name_components = split('-', $file);
if (scalar @file_name_components == 2) {
$file_name = shift @file_name_components;
$split_slice_range = shift @file_name_components;
my @components = split('_', $split_slice_range);
if (scalar @components == 3) {
my ($seq_region_id, $start, $end) = @components;
$files->{$file_name}->{$seq_region_id}->{$start} = "$file_name-$seq_region_id\_$start\_$end.$file_type";
}
}
} # else .err and .out files
}
return $files;
}
sub join_split_slice_files {
my ($self, $working_dir, $files) = @_;
my $species = $self->param('species');
my $seq_region_id2name = {};
my $sequence_regions = {};
if ($species eq 'homo_sapiens') {
my $cdba = $self->get_species_adaptor($species, 'core');
my $sa = $cdba->get_SliceAdaptor;
my $slices = $sa->fetch_all('chromosome');
foreach my $slice (@$slices) {
$seq_region_id2name->{$slice->get_seq_region_id} = $slice->seq_region_name;
my $seq_region_id = $slice->get_seq_region_id;
$sequence_regions->{$seq_region_id}->{start} = $slice->start;
$sequence_regions->{$seq_region_id}->{end} = $slice->end;
$sequence_regions->{$seq_region_id}->{name} = $slice->seq_region_name;
}
}
my $tmp_dir = $self->param('tmp_dir');
foreach my $file_type (keys %$files) {
foreach my $seq_region_id (keys %{$files->{$file_type}}) {
my $id_count = 1;
my @start_positions = sort keys %{$files->{$file_type}->{$seq_region_id}};
my $fh_join;
if ($species eq 'homo_sapiens') {
my $seq_region_name = $seq_region_id2name->{$seq_region_id};
if ($seq_region_name) {
$fh_join = FileHandle->new("$working_dir/$file_type-chr$seq_region_name.gvf", 'w');
} else {
$self->warning("No seq_region_name for $seq_region_id");
$fh_join = FileHandle->new("$working_dir/$file_type-$seq_region_id.gvf", 'w');
}
} else {
$fh_join = FileHandle->new("$working_dir/$file_type-$seq_region_id.gvf", 'w');
}
my $first_start_position = $start_positions[0];
my $file_name = $files->{$file_type}->{$seq_region_id}->{$first_start_position};
my $fh = FileHandle->new("$working_dir/$file_name", 'r');
while (<$fh>) {
chomp;
my $line = $_;
if ($line =~ m/^#/) {
next if ($line =~ m/^##sequence-region/);
print $fh_join $line, "\n";
}
}
my $name = $sequence_regions->{$seq_region_id}->{name};
my $start = $sequence_regions->{$seq_region_id}->{start};
my $end = $sequence_regions->{$seq_region_id}->{end};
print $fh_join join(' ', '##sequence-region', $name, $start, $end), "\n";
$fh->close();
foreach my $start_position (@start_positions) {
my $file_name = $files->{$file_type}->{$seq_region_id}->{$start_position};
my $fh = FileHandle->new("$working_dir/$file_name", 'r');
while (<$fh>) {
chomp;
my $line = $_;
next if ($line =~ m/^#/);
my $gvf_line = get_gvf_line($line, $id_count);
print $fh_join $gvf_line, "\n";
$id_count++;
}
$fh->close();
`gzip $working_dir/$file_name`;
`mv $working_dir/$file_name.gz $tmp_dir`;
}
$fh_join->close();
}
}
}
sub get_gvf_line {
my ($line, $id_count) = @_;
my $gvf_line = {};
my @header_names = qw/seq_id source type start end score strand phase/;
my @header_values = split(/\t/, $line);
my $attrib = pop @header_values;
for my $i (0 .. $#header_names) {
$gvf_line->{$header_names[$i]} = $header_values[$i];
}
my @attributes = split(';', $attrib);
foreach my $attribute (@attributes) {
my ($key, $value) = split('=', $attribute);
if ($value) {
$gvf_line->{attributes}->{$key} = $value;
}
}
$gvf_line->{attributes}->{ID} = $id_count;
$line = join("\t", map {$gvf_line->{$_}} (
'seq_id',
'source',
'type',
'start',
'end',
'score',
'strand',
'phase'));
my $attributes = join(";", map{"$_=$gvf_line->{attributes}->{$_}"} keys %{$gvf_line->{attributes}});
return "$line\t$attributes";
}
sub run_cmd {
my ($self ,$cmd) = @_;
if (my $return_value = system($cmd)) {
$return_value >>= 8;
die "system($cmd) failed: $return_value";
}
}
=begin
sub get_covered_seq_regions {
my $self = shift;
my $species = $self->param('species');
my $counts;
my $vdba = $self->get_species_adaptor($species, 'variation');
my $dbh = $vdba->dbc->db_handle;
my $sth = $dbh->prepare(qq{
SELECT sr.seq_region_id, count(*)
FROM seq_region sr, variation_feature vf
WHERE sr.seq_region_id = vf.seq_region_id
GROUP BY sr.seq_region_id;
});
$sth->{'mysql_use_result'} = 1;
$sth->execute();
my ($slice_id, $count);
$sth->bind_columns(\$slice_id, \$count);
while ($sth->fetch()) {
if ($count > 0) {
$counts->{$slice_id} = $count;
}
}
$sth->finish();
return $counts;
}
=end
=cut
sub get_covered_seq_regions {
my $self = shift;
my $species = $self->param('species');
my $counts;
my $vdba = $self->get_species_adaptor($species, 'variation');
my $cdba = $self->get_species_adaptor($species, 'core');
my $toplevel_seq_region_ids = {};
my $sa = $cdba->get_SliceAdaptor;
my $toplevel_slices = $sa->fetch_all('toplevel');
foreach my $toplevel_slice (@$toplevel_slices) {
$toplevel_seq_region_ids->{$toplevel_slice->get_seq_region_id} = 1;
}
my $dbh = $vdba->dbc->db_handle;
my $sth = $dbh->prepare(qq{
SELECT sr.seq_region_id, count(*)
FROM seq_region sr, variation_feature vf
WHERE sr.seq_region_id = vf.seq_region_id
GROUP BY sr.seq_region_id;
});
$sth->{'mysql_use_result'} = 1;
$sth->execute();
my ($slice_id, $count);
$sth->bind_columns(\$slice_id, \$count);
while ($sth->fetch()) {
if ($count > 0) {
if ($toplevel_seq_region_ids->{$slice_id}) {
$counts->{$slice_id} = $count;
}
}
}
$sth->finish();
return $counts;
}
sub write_output {
my $self = shift;
$self->dataflow_output_id($self->param('input_for_validation'), 1);
return;
}
1;
| 29.2 | 136 | 0.610497 |
edcd023fbdea05a98112b492a966c19e8a492593 | 5,869 | pm | Perl | lib/sdk/Com/Vmware/Vcenter/Ovf/OvfWarning.pm | bince-criticalcase/vsphere-automation-sdk-perl | da3330bf66dc6c853e9a23062146d54afc299955 | [
"MIT"
] | 26 | 2017-04-24T19:20:08.000Z | 2021-12-06T23:15:09.000Z | lib/sdk/Com/Vmware/Vcenter/Ovf/OvfWarning.pm | bince-criticalcase/vsphere-automation-sdk-perl | da3330bf66dc6c853e9a23062146d54afc299955 | [
"MIT"
] | 7 | 2017-05-25T04:49:56.000Z | 2020-10-12T09:13:16.000Z | lib/sdk/Com/Vmware/Vcenter/Ovf/OvfWarning.pm | DamonLiang2021/vsphere-automation-sdk-perl | da3330bf66dc6c853e9a23062146d54afc299955 | [
"MIT"
] | 11 | 2017-05-05T11:52:12.000Z | 2021-12-06T23:14:59.000Z | ## @class Com::Vmware::Vcenter::Ovf::OvfWarning
#
#
# The ``Com::Vmware::Vcenter::Ovf::OvfWarning`` *class* describes a warning related
# to accessing, validating, deploying, or exporting an OVF package.
package Com::Vmware::Vcenter::Ovf::OvfWarning;
#
# Base class
#
use base qw(Com::Vmware::Vapi::Bindings::VapiStruct);
#
# vApi modules
#
use Com::Vmware::Vapi::Data::UnionValidator;
## @method new ()
# Constructor to initialize the Com::Vmware::Vcenter::Ovf::OvfWarning structure
#
# @retval
# Blessed object
#
sub new {
my ($class, %args) = @_;
$class = ref($class) || $class;
my $validatorList = [];
$validatorList = [
new Com::Vmware::Vapi::Data::UnionValidator(
'discriminant_name' => 'category',
'case_map' => {
'VALIDATION' => ['issues'],
'INPUT' => ['name', 'value', 'message'],
'SERVER' => ['error'],
}),
];
my $self = $class->SUPER::new('validator_list' => $validatorList, %args);
$self->{category} = $args{'category'};
$self->{issues} = $args{'issues'};
$self->{name} = $args{'name'};
$self->{value} = $args{'value'};
$self->{message} = $args{'message'};
$self->{error} = $args{'error'};
$self->set_binding_class('binding_class' => 'Com::Vmware::Vcenter::Ovf::OvfWarning');
$self->set_binding_name('name' => 'com.vmware.vcenter.ovf.ovf_warning');
$self->set_binding_field('key' => 'category', 'value' => new Com::Vmware::Vapi::Bindings::Type::ReferenceType('module_ctx' => 'Com::Vmware::Vcenter::Ovf', 'type_name' => 'OvfMessage::Category'));
$self->set_binding_field('key' => 'issues', 'value' => new Com::Vmware::Vapi::Bindings::Type::OptionalType('element_type' => new Com::Vmware::Vapi::Bindings::Type::ListType(new Com::Vmware::Vapi::Bindings::Type::ReferenceType('module_ctx' => 'Com::Vmware::Vcenter::Ovf', 'type_name' => 'ParseIssue'))));
$self->set_binding_field('key' => 'name', 'value' => new Com::Vmware::Vapi::Bindings::Type::OptionalType('element_type' => new Com::Vmware::Vapi::Bindings::Type::StringType()));
$self->set_binding_field('key' => 'value', 'value' => new Com::Vmware::Vapi::Bindings::Type::OptionalType('element_type' => new Com::Vmware::Vapi::Bindings::Type::StringType()));
$self->set_binding_field('key' => 'message', 'value' => new Com::Vmware::Vapi::Bindings::Type::OptionalType('element_type' => new Com::Vmware::Vapi::Bindings::Type::ReferenceType('module_ctx' => 'Com::Vmware::Vapi::Std', 'type_name' => 'LocalizableMessage')));
$self->set_binding_field('key' => 'error', 'value' => new Com::Vmware::Vapi::Bindings::Type::OptionalType('element_type' => new Com::Vmware::Vapi::Bindings::Type::DynamicStructType()));
bless $self, $class;
return $self;
}
## @method get_category ()
# Gets the value of 'category' property.
#
# @retval category - The current value of the field.
# The message category.
#
# Category#
sub get_category {
my ($self, %args) = @_;
return $self->{'category'};
}
## @method set_category ()
# Sets the given value for 'category' property.
#
# @param category - New value for the field.
# The message category.
#
sub set_category {
my ($self, %args) = @_;
$self->{'category'} = $args{'category'};
return;
}
## @method get_issues ()
# Gets the value of 'issues' property.
#
# @retval issues - The current value of the field.
# *List* of parse issues (see class Com::Vmware::Vcenter::Ovf::ParseIssue ).
#
# optional#
sub get_issues {
my ($self, %args) = @_;
return $self->{'issues'};
}
## @method set_issues ()
# Sets the given value for 'issues' property.
#
# @param issues - New value for the field.
# *List* of parse issues (see class Com::Vmware::Vcenter::Ovf::ParseIssue ).
#
sub set_issues {
my ($self, %args) = @_;
$self->{'issues'} = $args{'issues'};
return;
}
## @method get_name ()
# Gets the value of 'name' property.
#
# @retval name - The current value of the field.
# The name of input parameter.
#
# optional#
sub get_name {
my ($self, %args) = @_;
return $self->{'name'};
}
## @method set_name ()
# Sets the given value for 'name' property.
#
# @param name - New value for the field.
# The name of input parameter.
#
sub set_name {
my ($self, %args) = @_;
$self->{'name'} = $args{'name'};
return;
}
## @method get_value ()
# Gets the value of 'value' property.
#
# @retval value - The current value of the field.
# The value of input parameter.
#
# optional#
sub get_value {
my ($self, %args) = @_;
return $self->{'value'};
}
## @method set_value ()
# Sets the given value for 'value' property.
#
# @param value - New value for the field.
# The value of input parameter.
#
sub set_value {
my ($self, %args) = @_;
$self->{'value'} = $args{'value'};
return;
}
## @method get_message ()
# Gets the value of 'message' property.
#
# @retval message - The current value of the field.
# A localizable message.
#
# optional#
sub get_message {
my ($self, %args) = @_;
return $self->{'message'};
}
## @method set_message ()
# Sets the given value for 'message' property.
#
# @param message - New value for the field.
# A localizable message.
#
sub set_message {
my ($self, %args) = @_;
$self->{'message'} = $args{'message'};
return;
}
## @method get_error ()
# Gets the value of 'error' property.
#
# @retval error - The current value of the field.
# Represents a server class Com::Vmware::Vapi::Std::Errors::Error .
#
# optional#
sub get_error {
my ($self, %args) = @_;
return $self->{'error'};
}
## @method set_error ()
# Sets the given value for 'error' property.
#
# @param error - New value for the field.
# Represents a server class Com::Vmware::Vapi::Std::Errors::Error .
#
sub set_error {
my ($self, %args) = @_;
$self->{'error'} = $args{'error'};
return;
}
1;
| 28.08134 | 306 | 0.627364 |
edb3d0c49604983df6c406d93a54b2235560455c | 5,149 | pm | Perl | gorgone/modules/centreon/statistics/hooks.pm | centreon-lab/centreon-gorgone | bb29c8749af5ff0563205fad5cb3b3b1c7dc3cd7 | [
"Apache-2.0"
] | null | null | null | gorgone/modules/centreon/statistics/hooks.pm | centreon-lab/centreon-gorgone | bb29c8749af5ff0563205fad5cb3b3b1c7dc3cd7 | [
"Apache-2.0"
] | null | null | null | gorgone/modules/centreon/statistics/hooks.pm | centreon-lab/centreon-gorgone | bb29c8749af5ff0563205fad5cb3b3b1c7dc3cd7 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package gorgone::modules::centreon::statistics::hooks;
use warnings;
use strict;
use gorgone::class::core;
use gorgone::standard::constants qw(:all);
use gorgone::modules::centreon::statistics::class;
use constant NAMESPACE => 'centreon';
use constant NAME => 'statistics';
use constant EVENTS => [
{ event => 'STATISTICSREADY' },
{ event => 'STATISTICSLISTENER' },
{ event => 'BROKERSTATS', uri => '/broker', method => 'GET' },
{ event => 'ENGINESTATS', uri => '/engine', method => 'GET' }
];
my $config_core;
my $config;
my $config_db_centreon;
my $config_db_centstorage;
my $statistics = {};
my $stop = 0;
sub register {
my (%options) = @_;
$config = $options{config};
$config_core = $options{config_core};
$config_db_centreon = $options{config_db_centreon};
$config_db_centstorage = $options{config_db_centstorage};
$config->{broker_cache_dir} = defined($config->{broker_cache_dir}) ?
$config->{broker_cache_dir} : '/var/cache/centreon/broker-stats/';
$config->{engine_stats_dir} = defined($config->{config}->{engine_stats_dir}) ?
$config->{config}->{engine_stats_dir} : "/var/lib/centreon/nagios-perf/";
$config->{interval} = defined($config->{interval}) ? $config->{interval} : 300;
$config->{length} = defined($config->{length}) ? $config->{length} : 365;
$config->{number} = $config->{length} * 24 * 60 * 60 / $config->{interval};
$config->{heartbeat_factor} = defined($config->{heartbeat_factor}) ? $config->{heartbeat_factor} : 10;
$config->{heartbeat} = $config->{interval} * $config->{heartbeat_factor};
return (1, NAMESPACE, NAME, EVENTS);
}
sub init {
my (%options) = @_;
create_child(logger => $options{logger});
}
sub routing {
my (%options) = @_;
if ($options{action} eq 'STATISTICSREADY') {
$statistics->{ready} = 1;
return undef;
}
if (gorgone::class::core::waiting_ready(ready => \$statistics->{ready}) == 0) {
gorgone::standard::library::add_history(
dbh => $options{dbh},
code => GORGONE_ACTION_FINISH_KO,
token => $options{token},
data => { msg => 'gorgonestatistics: still no ready' },
json_encode => 1
);
return undef;
}
$options{gorgone}->send_internal_message(
identity => 'gorgone-statistics',
action => $options{action},
data => $options{data},
token => $options{token}
);
}
sub gently {
my (%options) = @_;
$stop = 1;
if (defined($statistics->{running}) && $statistics->{running} == 1) {
$options{logger}->writeLogDebug("[statistics] Send TERM signal $statistics->{pid}");
CORE::kill('TERM', $statistics->{pid});
}
}
sub kill {
my (%options) = @_;
if ($statistics->{running} == 1) {
$options{logger}->writeLogDebug("[statistics] Send KILL signal for pool");
CORE::kill('KILL', $statistics->{pid});
}
}
sub kill_internal {
my (%options) = @_;
}
sub check {
my (%options) = @_;
my $count = 0;
foreach my $pid (keys %{$options{dead_childs}}) {
# Not me
next if (!defined($statistics->{pid}) || $statistics->{pid} != $pid);
$statistics = {};
delete $options{dead_childs}->{$pid};
if ($stop == 0) {
create_child(logger => $options{logger});
}
}
$count++ if (defined($statistics->{running}) && $statistics->{running} == 1);
return $count;
}
sub broadcast {
my (%options) = @_;
routing(%options);
}
# Specific functions
sub create_child {
my (%options) = @_;
$options{logger}->writeLogInfo("[statistics] Create module 'statistics' process");
my $child_pid = fork();
if ($child_pid == 0) {
$0 = 'gorgone-statistics';
my $module = gorgone::modules::centreon::statistics::class->new(
logger => $options{logger},
module_id => NAME,
config_core => $config_core,
config => $config,
config_db_centreon => $config_db_centreon,
config_db_centstorage => $config_db_centstorage,
);
$module->run();
exit(0);
}
$options{logger}->writeLogDebug("[statistics] PID $child_pid (gorgone-statistics)");
$statistics = { pid => $child_pid, ready => 0, running => 1 };
}
1;
| 29.763006 | 106 | 0.610216 |
edd88a8b107bdf0d8413b587e8c98598399dadf7 | 38,890 | t | Perl | bundle/lua-resty-core-0.1.22/t/shdict.t | khalv786/openresty-1.19.9.1 | 92b461fcc91652ef5436f213604f15fada112070 | [
"BSD-2-Clause"
] | 7 | 2017-07-29T09:02:45.000Z | 2020-10-01T05:01:54.000Z | bundle/lua-resty-core-0.1.22/t/shdict.t | khalv786/openresty-1.19.9.1 | 92b461fcc91652ef5436f213604f15fada112070 | [
"BSD-2-Clause"
] | 5 | 2019-05-15T09:28:27.000Z | 2021-09-01T04:04:22.000Z | bundle/lua-resty-core-0.1.22/t/shdict.t | khalv786/openresty-1.19.9.1 | 92b461fcc91652ef5436f213604f15fada112070 | [
"BSD-2-Clause"
] | 4 | 2018-09-10T07:42:42.000Z | 2020-10-30T15:43:49.000Z | # vim:set ft= ts=4 sw=4 et fdm=marker:
use lib '.';
use t::TestCore;
#worker_connections(1014);
#master_process_enabled(1);
#log_level('warn');
repeat_each(2);
plan tests => repeat_each() * (blocks() * 5 + 2);
add_block_preprocessor(sub {
my $block = shift;
my $http_config = $block->http_config || '';
$http_config .= <<_EOC_;
lua_shared_dict dogs 1m;
lua_shared_dict cats 16k;
lua_shared_dict birds 100k;
$t::TestCore::HttpConfig
_EOC_
$block->set_value("http_config", $http_config);
});
#no_diff();
no_long_string();
check_accum_error_log();
run_tests();
__DATA__
=== TEST 1: get a string value
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
local ok, err, forcible = dogs:set("foo", "bar", 0, 72)
if not ok then
ngx.say("failed to set: ", err)
return
end
for i = 1, 100 do
val, flags = dogs:get("foo")
end
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: string
value: bar
flags: 72
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):11 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 2: get an nonexistent key
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
-- dogs:set("foo", "bar")
for i = 1, 100 do
val, flags = dogs:get("nonexistent")
end
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: nil
value: nil
flags: nil
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):7 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 3: get a boolean value (true)
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
dogs:set("foo", true, 0, 5678)
for i = 1, 100 do
val, flags = dogs:get("foo")
end
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: boolean
value: true
flags: 5678
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):7 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 4: get a boolean value (false)
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
dogs:set("foo", false, 0, 777)
for i = 1, 100 do
val, flags = dogs:get("foo")
end
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: boolean
value: false
flags: 777
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):7 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 5: get a number value (int)
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
dogs:set("foo", 51203)
for i = 1, 100 do
val, flags = dogs:get("foo")
end
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: number
value: 51203
flags: nil
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):7 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 6: get a number value (double)
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
dogs:set("foo", 3.1415926, 0, 78)
for i = 1, 100 do
val, flags = dogs:get("foo")
end
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: number
value: 3.1415926
flags: 78
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):7 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 7: get a large string value
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
dogs:flush_all()
dogs:flush_expired()
dogs:set("foo", string.rep("bbbb", 1024) .. "a", 0, 912)
for i = 1, 100 do
val, flags = dogs:get("foo")
end
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body eval
"value type: string
value: " . ("bbbb" x 1024) . "a
flags: 912
"
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):9 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 8: get_stale (false)
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags, stale
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
dogs:set("foo", "bar", 0, 72)
for i = 1, 100 do
val, flags, stale = dogs:get_stale("foo")
end
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
ngx.say("stale: ", stale)
}
}
--- request
GET /t
--- response_body
value type: string
value: bar
flags: 72
stale: false
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):7 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 9: get_stale (true)
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags, stale
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
local ok, err, forcible = dogs:set("foo", "bar", 0.01, 72)
if not ok then
ngx.say("failed to set: ", err)
return
end
ngx.update_time()
ngx.sleep(0.02)
for i = 1, 30 do
val, flags, stale = dogs:get_stale("foo")
end
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
ngx.say("stale: ", stale)
}
}
--- request
GET /t
--- response_body
value type: string
value: bar
flags: 72
stale: true
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):13 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 10: incr int
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
local ok, err, forcible = dogs:set("foo", 56)
if not ok then
ngx.say("failed to set: ", err)
return
end
for i = 1, 100 do
val, err = dogs:incr("foo", 2)
end
ngx.say("value: ", val)
ngx.say("err: ", err)
}
}
--- request
GET /t
--- response_body
value: 256
err: nil
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):11 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 11: incr double
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, err
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
dogs:set("foo", 56)
for i = 1, 150 do
val, err = dogs:incr("foo", 2.1)
end
ngx.say("value: ", val)
ngx.say("err: ", err)
}
}
--- request
GET /t
--- response_body
value: 371
err: nil
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):7 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 12: set a string value
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
local ok, err, forcible
for i = 1, 100 do
ok, err, forcible = dogs:set("foo", "bar", 0, 72)
end
if not ok then
ngx.say("failed to set: ", err)
return
end
val, flags = dogs:get("foo")
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: string
value: bar
flags: 72
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):7 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 13: set a boolean value (true)
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
local ok, err, forcible
for i = 1, 100 do
ok, err, forcible = dogs:set("foo", true, 0, 5678)
end
if not ok then
ngx.say("failed to set: ", err)
return
end
val, flags = dogs:get("foo")
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: boolean
value: true
flags: 5678
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):7 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 14: set a boolean value (false)
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
for i = 1, 100 do
dogs:set("foo", false, 0, 777)
end
val, flags = dogs:get("foo")
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: boolean
value: false
flags: 777
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):6 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 15: set a number value (int)
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
for i = 1, 100 do
dogs:set("foo", 51203)
end
val, flags = dogs:get("foo")
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: number
value: 51203
flags: nil
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):6 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 16: set a number value (double)
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
for i = 1, 100 do
dogs:set("foo", 3.1415926, 0, 78)
end
val, flags = dogs:get("foo")
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: number
value: 3.1415926
flags: 78
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):6 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 17: set a number value and a nil
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
for i = 1, 150 do
dogs:set("foo", 3.1415926, 0, 78)
dogs:set("foo", nil)
end
val, flags = dogs:get("foo")
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: nil
value: nil
flags: nil
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):6 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 18: safe set a number value
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
dogs:flush_all()
dogs:flush_expired()
for i = 1, 100 do
dogs:safe_set("foo", 3.1415926, 0, 78)
end
val, flags = dogs:get("foo")
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: number
value: 3.1415926
flags: 78
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):8 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 19: add a string value
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
dogs:flush_all()
local ok, err, forcible
for i = 1, 100 do
ok, err, forcible = dogs:add("foo" .. i, "bar", 0, 72)
end
if not ok then
ngx.say("failed to set: ", err)
return
end
val, flags = dogs:get("foo100")
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: string
value: bar
flags: 72
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):8 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 20: safe add a string value
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
dogs:flush_all()
dogs:flush_expired()
local ok, err, forcible
for i = 1, 100 do
ok, err, forcible = dogs:safe_add("foo" .. i, "bar", 0, 72)
end
if not ok then
ngx.say("failed to set: ", err)
return
end
val, flags = dogs:get("foo100")
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: string
value: bar
flags: 72
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):9 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 21: replace a string value
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
dogs:set("foo", "hello")
local ok, err, forcible
for i = 1, 100 do
ok, err, forcible = dogs:replace("foo", "bar" .. i, 0, 72)
end
if not ok then
ngx.say("failed to set: ", err)
return
end
val, flags = dogs:get("foo")
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: string
value: bar100
flags: 72
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):8 loop\]/
--- no_error_log
[error]
-- NYI:
=== TEST 22: set a number value and delete
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
-- local cd = ffi.cast("void *", dogs)
for i = 1, 150 do
dogs:set("foo", 3.1415926, 0, 78)
dogs:delete("foo")
end
val, flags = dogs:get("foo")
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: nil
value: nil
flags: nil
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):6 loop\]/
--- no_error_log
[error]
-- NYI:
stitch
=== TEST 23: set nil key
--- config
location = /t {
content_by_lua_block {
local val, flags
local dogs = ngx.shared.dogs
local ok, err = dogs:set(nil, "bar")
if not ok then
ngx.say("failed to set: ", err)
end
}
}
--- request
GET /t
--- response_body
failed to set: nil key
--- no_error_log
[error]
[alert]
[crit]
=== TEST 24: get nil key
--- config
location = /t {
content_by_lua_block {
local val, flags
local dogs = ngx.shared.dogs
local value, err = dogs:get(nil, "bar")
if not ok then
ngx.say("failed to get: ", err)
end
}
}
--- request
GET /t
--- response_body
failed to get: nil key
--- no_error_log
[error]
[alert]
[crit]
=== TEST 25: get stale key
--- config
location = /t {
content_by_lua_block {
local val, flags
local dogs = ngx.shared.dogs
local value, err = dogs:get_stale(nil, "bar")
if not ok then
ngx.say("failed to get stale: ", err)
end
}
}
--- request
GET /t
--- response_body
failed to get stale: nil key
--- no_error_log
[error]
[alert]
[crit]
=== TEST 26: incr key
--- config
location = /t {
content_by_lua_block {
local val, flags
local dogs = ngx.shared.dogs
local value, err = dogs:incr(nil, 32)
if not value then
ngx.say("failed to incr: ", err)
end
}
}
--- request
GET /t
--- response_body
failed to incr: nil key
--- no_error_log
[error]
[alert]
[crit]
=== TEST 27: flush_all
--- config
location = /t {
content_by_lua_block {
local ffi = require "ffi"
local val, flags
local dogs = ngx.shared.dogs
dogs:set("foo", "bah")
-- local cd = ffi.cast("void *", dogs)
for i = 1, 150 do
dogs:flush_all()
end
val, flags = dogs:get("foo")
ngx.say("value type: ", type(val))
ngx.say("value: ", val)
ngx.say("flags: ", flags)
}
}
--- request
GET /t
--- response_body
value type: nil
value: nil
flags: nil
--- error_log eval
qr/\[TRACE\s+\d+ content_by_lua\(nginx\.conf:\d+\):7 loop\]/
--- no_error_log
[error]
-- NYI:
stitch
=== TEST 28: incr, value is not number
--- config
location = /t {
content_by_lua_block {
local val, flags
local dogs = ngx.shared.dogs
local value, err = dogs:incr("foo", "bar")
if not value then
ngx.say("failed to incr: ", err)
end
}
}
--- request
GET /t
--- error_code: 500
--- response_body_like: 500
--- error_log
cannot convert 'nil' to 'double'
--- no_error_log
[alert]
[crit]
=== TEST 29: incr with init
--- config
location = /t {
content_by_lua_block {
local val, flags
local dogs = ngx.shared.dogs
dogs:flush_all()
local value, err = dogs:incr("foo", 10)
if not value then
ngx.say("failed to incr: ", err)
end
local value, err, forcible = dogs:incr("foo", 10, 10)
if not value then
ngx.say("failed to incr: ", err)
return
end
ngx.say("incr ok, value: ", value, ", forcible: ", forcible)
}
}
--- request
GET /t
--- response_body
failed to incr: not found
incr ok, value: 20, forcible: false
--- no_error_log
[error]
[alert]
[crit]
=== TEST 30: incr, init is not number
--- config
location = /t {
content_by_lua_block {
local val, flags
local dogs = ngx.shared.dogs
local value, err = dogs:incr("foo", 10, "bar")
if not ok then
ngx.say("failed to incr: ", err)
end
}
}
--- request
GET /t
--- error_code: 500
--- response_body_like: 500
--- error_log
number expected, got string
--- no_error_log
[alert]
[crit]
=== TEST 31: capacity
--- config
location = /t {
content_by_lua_block {
local cats = ngx.shared.cats
local capacity = cats:capacity()
ngx.say("capacity type: ", type(capacity))
ngx.say("capacity: ", capacity)
}
}
--- request
GET /t
--- response_body
capacity type: number
capacity: 16384
--- no_error_log
[error]
[alert]
[crit]
=== TEST 32: free_space, empty (16k zone)
--- skip_nginx: 5: < 1.11.7
--- config
location = /t {
content_by_lua_block {
local cats = ngx.shared.cats
cats:flush_all()
cats:flush_expired()
local free_page_bytes = cats:free_space()
ngx.say("free_page_bytes type: ", type(free_page_bytes))
ngx.say("free_page_bytes: ", free_page_bytes)
}
}
--- request
GET /t
--- response_body
free_page_bytes type: number
free_page_bytes: 4096
--- no_error_log
[error]
[alert]
[crit]
=== TEST 33: free_space, empty (100k zone)
--- skip_nginx: 5: < 1.11.7
--- config
location = /t {
content_by_lua_block {
local birds = ngx.shared.birds
birds:flush_all()
birds:flush_expired()
local free_page_bytes = birds:free_space()
ngx.say("free_page_bytes type: ", type(free_page_bytes))
ngx.say("free_page_bytes: ", free_page_bytes)
}
}
--- request
GET /t
--- response_body_like chomp
\Afree_page_bytes type: number
free_page_bytes: (?:90112|94208)
\z
--- no_error_log
[error]
[alert]
[crit]
=== TEST 34: free_space, about half full, one page left
--- skip_nginx: 5: < 1.11.7
--- config
location = /t {
content_by_lua_block {
local cats = ngx.shared.cats
cats:flush_all()
cats:flush_expired()
for i = 1, 31 do
local key = string.format("key%05d", i)
local val = string.format("val%05d", i)
local success, err, forcible = cats:set(key, val)
if err ~= nil then
ngx.say(string.format("got error, i=%d, err=%s", i, err))
end
if forcible then
ngx.say(string.format("got forcible, i=%d", i))
end
if not success then
ngx.say(string.format("got not success, i=%d", i))
end
end
local free_page_bytes = cats:free_space()
ngx.say("free_page_bytes type: ", type(free_page_bytes))
ngx.say("free_page_bytes: ", free_page_bytes)
}
}
--- request
GET /t
--- response_body
free_page_bytes type: number
free_page_bytes: 4096
--- no_error_log
[error]
[alert]
[crit]
=== TEST 35: free_space, about half full, no page left
--- skip_nginx: 5: < 1.11.7
--- config
location = /t {
content_by_lua_block {
local cats = ngx.shared.cats
cats:flush_all()
cats:flush_expired()
for i = 1, 32 do
local key = string.format("key%05d", i)
local val = string.format("val%05d", i)
local success, err, forcible = cats:set(key, val)
if err ~= nil then
ngx.say(string.format("got error, i=%d, err=%s", i, err))
end
if forcible then
ngx.say(string.format("got forcible, i=%d", i))
end
if not success then
ngx.say(string.format("got not success, i=%d", i))
end
end
local free_page_bytes = cats:free_space()
ngx.say("free_page_bytes type: ", type(free_page_bytes))
ngx.say("free_page_bytes: ", free_page_bytes)
}
}
--- request
GET /t
--- response_body_like chomp
\Afree_page_bytes type: number
free_page_bytes: (?:0|4096)
\z
--- no_error_log
[error]
[alert]
[crit]
=== TEST 36: free_space, full
--- skip_nginx: 5: < 1.11.7
--- config
location = /t {
content_by_lua_block {
local cats = ngx.shared.cats
cats:flush_all()
cats:flush_expired()
for i = 1, 63 do
local key = string.format("key%05d", i)
local val = string.format("val%05d", i)
local success, err, forcible = cats:set(key, val)
if err ~= nil then
ngx.say(string.format("got error, i=%d, err=%s", i, err))
end
if forcible then
ngx.say(string.format("got forcible, i=%d", i))
end
if not success then
ngx.say(string.format("got not success, i=%d", i))
end
end
local free_page_bytes = cats:free_space()
ngx.say("free_page_bytes type: ", type(free_page_bytes))
ngx.say("free_page_bytes: ", free_page_bytes)
}
}
--- request
GET /t
--- response_body
free_page_bytes type: number
free_page_bytes: 0
--- no_error_log
[error]
[alert]
[crit]
=== TEST 37: free_space, got forcible
--- skip_nginx: 5: < 1.11.7
--- config
location = /t {
content_by_lua_block {
local cats = ngx.shared.cats
cats:flush_all()
cats:flush_expired()
for i = 1, 64 do
local key = string.format("key%05d", i)
local val = string.format("val%05d", i)
local success, err, forcible = cats:set(key, val)
if err ~= nil then
ngx.say(string.format("got error, i=%d, err=%s", i, err))
end
if forcible then
ngx.say(string.format("got forcible, i=%d", i))
end
if not success then
ngx.say(string.format("got not success, i=%d", i))
end
end
local free_page_bytes = cats:free_space()
ngx.say("free_page_bytes type: ", type(free_page_bytes))
ngx.say("free_page_bytes: ", free_page_bytes)
}
}
--- request
GET /t
--- response_body_like chomp
\A(?:got forcible, i=64
)?free_page_bytes type: number
free_page_bytes: 0
\z
--- no_error_log
[error]
[alert]
[crit]
=== TEST 38: free_space, full (100k)
--- skip_nginx: 5: < 1.11.7
--- config
location = /t {
content_by_lua_block {
local birds = ngx.shared.birds
birds:flush_all()
birds:flush_expired()
for i = 1, 1000 do
local key = string.format("key%05d", i)
local val = string.format("val%05d", i)
local ok, err, forcible = birds:set(key, val)
if err ~= nil then
ngx.say(string.format("got error, i=%d, err=%s", i, err))
end
if forcible then
ngx.say(string.format("got forcible, i=%d", i))
break
end
if not ok then
ngx.say(string.format("got not ok, i=%d", i))
break
end
end
local free_page_bytes = birds:free_space()
ngx.say("free_page_bytes type: ", type(free_page_bytes))
ngx.say("free_page_bytes: ", free_page_bytes)
}
}
--- request
GET /t
--- response_body_like chomp
\A(?:got forcible, i=736
)?free_page_bytes type: number
free_page_bytes: (?:0|32768)
\z
--- no_error_log
[error]
[alert]
[crit]
=== TEST 39: incr bad init_ttl argument
--- config
location = /t {
content_by_lua_block {
local dogs = ngx.shared.dogs
local pok, err = pcall(dogs.incr, dogs, "foo", 1, 0, -1)
if not pok then
ngx.say("not ok: ", err)
return
end
ngx.say("ok")
}
}
--- request
GET /t
--- response_body
not ok: bad "init_ttl" argument
--- no_error_log
[error]
[alert]
[crit]
=== TEST 40: incr init_ttl argument is not a number
--- config
location = /t {
content_by_lua_block {
local dogs = ngx.shared.dogs
local pok, err = pcall(dogs.incr, dogs, "foo", 1, 0, "bar")
if not pok then
ngx.say("not ok: ", err)
return
end
ngx.say("ok")
}
}
--- request
GET /t
--- response_body
not ok: bad init_ttl arg: number expected, got string
--- no_error_log
[error]
[alert]
[crit]
=== TEST 41: incr init_ttl argument without init
--- config
location = /t {
content_by_lua_block {
local dogs = ngx.shared.dogs
local pok, err = pcall(dogs.incr, dogs, "foo", 1, nil, 0.01)
if not pok then
ngx.say("not ok: ", err)
return
end
ngx.say("ok")
}
}
--- request
GET /t
--- response_body
not ok: must provide "init" when providing "init_ttl"
--- no_error_log
[error]
[alert]
[crit]
=== TEST 42: incr key with init_ttl (key exists)
--- config
location = /t {
content_by_lua_block {
local dogs = ngx.shared.dogs
dogs:set("foo", 32)
local res, err = dogs:incr("foo", 10502, 0, 0.01)
ngx.say("incr: ", res, " ", err)
ngx.say("foo = ", dogs:get("foo"))
ngx.update_time()
ngx.sleep(0.02)
ngx.say("foo after incr init_ttl = ", dogs:get("foo"))
}
}
--- request
GET /t
--- response_body
incr: 10534 nil
foo = 10534
foo after incr init_ttl = 10534
--- no_error_log
[error]
[alert]
[crit]
=== TEST 43: incr key with init and init_ttl (key not exists)
--- config
location = /t {
content_by_lua_block {
local dogs = ngx.shared.dogs
dogs:flush_all()
local res, err = dogs:incr("foo", 10502, 1, 0.01)
ngx.say("incr: ", res, " ", err)
ngx.say("foo = ", dogs:get("foo"))
ngx.update_time()
ngx.sleep(0.02)
ngx.say("foo after init_ttl = ", dogs:get("foo"))
}
}
--- request
GET /t
--- response_body
incr: 10503 nil
foo = 10503
foo after init_ttl = nil
--- no_error_log
[error]
[alert]
[crit]
=== TEST 44: incr key with init and init_ttl as string (key not exists)
--- config
location = /t {
content_by_lua_block {
local dogs = ngx.shared.dogs
dogs:flush_all()
local res, err = dogs:incr("foo", 10502, 1, "0.01")
ngx.say("incr: ", res, " ", err)
ngx.say("foo = ", dogs:get("foo"))
ngx.update_time()
ngx.sleep(0.02)
ngx.say("foo after init_ttl = ", dogs:get("foo"))
}
}
--- request
GET /t
--- response_body
incr: 10503 nil
foo = 10503
foo after init_ttl = nil
--- no_error_log
[error]
[alert]
[crit]
=== TEST 45: incr key with init and init_ttl (key expired and size matched)
--- config
location = /t {
content_by_lua_block {
local dogs = ngx.shared.dogs
for i = 1, 20 do
dogs:set("bar" .. i, i, 0.02)
end
dogs:set("foo", 32, 0.02)
ngx.update_time()
ngx.sleep(0.03)
local res, err = dogs:incr("foo", 10502, 0, 0.01)
ngx.say("incr: ", res, " ", err)
ngx.say("foo = ", dogs:get("foo"))
ngx.update_time()
ngx.sleep(0.02)
ngx.say("foo after init_ttl = ", dogs:get("foo"))
}
}
--- request
GET /t
--- response_body
incr: 10502 nil
foo = 10502
foo after init_ttl = nil
--- no_error_log
[error]
[alert]
[crit]
=== TEST 46: incr key with init and init_ttl (forcibly override other valid entries)
--- config
location = /t {
content_by_lua_block {
local dogs = ngx.shared.dogs
dogs:flush_all()
local long_prefix = string.rep("1234567890", 100)
for i = 1, 1000 do
local success, err, forcible = dogs:set(long_prefix .. i, i)
if forcible then
dogs:delete(long_prefix .. i)
break
end
end
local res, err, forcible = dogs:incr(long_prefix .. "bar", 10502, 0)
ngx.say("incr: ", res, " ", err, " ", forcible)
local res, err, forcible = dogs:incr(long_prefix .. "foo", 10502, 0, 0.01)
ngx.say("incr: ", res, " ", err, " ", forcible)
ngx.say("foo = ", dogs:get(long_prefix .. "foo"))
ngx.update_time()
ngx.sleep(0.02)
ngx.say("foo after init_ttl = ", dogs:get("foo"))
}
}
--- request
GET /t
--- response_body
incr: 10502 nil false
incr: 10502 nil true
foo = 10502
foo after init_ttl = nil
--- no_error_log
[error]
[alert]
[crit]
=== TEST 47: exptime uses long type to avoid overflow in set() + ttl()
--- config
location = /t {
content_by_lua_block {
local dogs = ngx.shared.dogs
dogs:flush_all()
local ok, err = dogs:set("huge_ttl", true, 2 ^ 31)
if not ok then
ngx.say("err setting: ", err)
return
end
local ttl, err = dogs:ttl("huge_ttl")
if not ttl then
ngx.say("err retrieving ttl: ", err)
return
end
ngx.say("ttl: ", ttl)
}
}
--- request
GET /t
--- response_body
ttl: 2147483648
--- no_error_log
[error]
[alert]
[crit]
=== TEST 48: exptime uses long type to avoid overflow in expire() + ttl()
--- config
location = /t {
content_by_lua_block {
local dogs = ngx.shared.dogs
dogs:flush_all()
local ok, err = dogs:set("updated_huge_ttl", true)
if not ok then
ngx.say("err setting: ", err)
return
end
local ok, err = dogs:expire("updated_huge_ttl", 2 ^ 31)
if not ok then
ngx.say("err expire: ", err)
return
end
local ttl, err = dogs:ttl("updated_huge_ttl")
if not ttl then
ngx.say("err retrieving ttl: ", err)
return
end
ngx.say("ttl: ", ttl)
}
}
--- request
GET /t
--- response_body
ttl: 2147483648
--- no_error_log
[error]
[alert]
[crit]
=== TEST 49: init_ttl uses long type to avoid overflow in incr() + ttl()
--- config
location = /t {
content_by_lua_block {
local dogs = ngx.shared.dogs
dogs:flush_all()
local ok, err = dogs:incr("incr_huge_ttl", 1, 0, 2 ^ 31)
if not ok then
ngx.say("err incr: ", err)
return
end
local ttl, err = dogs:ttl("incr_huge_ttl")
if not ttl then
ngx.say("err retrieving ttl: ", err)
return
end
ngx.say("ttl: ", ttl)
}
}
--- request
GET /t
--- response_body
ttl: 2147483648
--- no_error_log
[error]
[alert]
[crit]
=== TEST 50: check zone argument
--- config
location = /t {
content_by_lua_block {
local function check_in_pcall(f, ...)
local ok, err = pcall(f, ...)
if not ok then
ngx.say(err)
else
ngx.say("ok")
end
end
local dogs = ngx.shared.dogs
check_in_pcall(dogs.set, dogs, 'k', 1)
check_in_pcall(dogs.set, 'k', 1)
check_in_pcall(dogs.set, {1}, 'k', 1)
check_in_pcall(dogs.set, {ngx.null}, 'k', 1)
}
}
--- request
GET /t
--- response_body
ok
bad "zone" argument
bad "zone" argument
bad "zone" argument
--- no_error_log
[error]
[alert]
[crit]
=== TEST 51: free_space, not supported in NGINX < 1.11.7
--- skip_nginx: 5: >= 1.11.7
--- config
location = /t {
content_by_lua_block {
local birds = ngx.shared.birds
local pok, perr = pcall(function ()
birds:free_space()
end)
if not pok then
ngx.say(perr)
end
}
}
--- request
GET /t
--- response_body_like
content_by_lua\(nginx\.conf:\d+\):\d+: 'shm:free_space\(\)' not supported in NGINX < 1.11.7
--- no_error_log
[error]
[alert]
[crit]
| 23.512696 | 91 | 0.506917 |
ed460f959d41a063ce95d197f8348cee46b91451 | 687 | t | Perl | Classes/discount-1.5.4/tests/code.t | JHP4911/NOTTaskPaperForIOS | 455ff0ea1768f6ed0abfc731bc7f5ce8e73c0314 | [
"Unlicense"
] | 85 | 2015-01-13T09:03:43.000Z | 2021-04-22T04:42:32.000Z | Classes/discount-1.5.4/tests/code.t | larryoh/NOTTaskPaperForIOS | 455ff0ea1768f6ed0abfc731bc7f5ce8e73c0314 | [
"Unlicense"
] | 1 | 2015-04-30T05:04:38.000Z | 2015-04-30T13:12:54.000Z | Classes/discount-1.5.4/tests/code.t | larryoh/NOTTaskPaperForIOS | 455ff0ea1768f6ed0abfc731bc7f5ce8e73c0314 | [
"Unlicense"
] | 41 | 2015-01-21T16:37:53.000Z | 2021-04-19T07:27:04.000Z | ./echo "code blocks"
rc=0
MARKDOWN_FLAGS=
try() {
unset FLAGS
case "$1" in
-*) FLAGS=$1
shift ;;
esac
./echo -n " $1" '..................................' | ./cols 36
Q=`./echo "$2" | ./markdown $FLAGS`
if [ "$3" = "$Q" ]; then
./echo " ok"
else
./echo " FAILED"
./echo "wanted: $3"
./echo "got : $Q"
rc=1
fi
}
try 'format for code block html' \
' this is
code' \
'<pre><code>this is
code
</code></pre>'
try 'unclosed single backtick' '`hi there' '<p>`hi there</p>'
try 'unclosed double backtick' '``hi there' '<p>``hi there</p>'
try 'remove space around code' '`` hi there ``' '<p><code>hi there</code></p>'
exit $rc
| 17.175 | 78 | 0.500728 |
eda7626fddd47899dc31dec2e2a8b29ebdd4f75e | 3,755 | pm | Perl | Net-DBus/lib/Net/DBus/Error.pm | freedesktop/system-tools-backends | a38fecd0269bb04a9384d78ccb83944efbdb959c | [
"Artistic-1.0-Perl"
] | 1 | 2020-04-05T18:46:16.000Z | 2020-04-05T18:46:16.000Z | Net-DBus/lib/Net/DBus/Error.pm | freedesktop/system-tools-backends | a38fecd0269bb04a9384d78ccb83944efbdb959c | [
"Artistic-1.0-Perl"
] | null | null | null | Net-DBus/lib/Net/DBus/Error.pm | freedesktop/system-tools-backends | a38fecd0269bb04a9384d78ccb83944efbdb959c | [
"Artistic-1.0-Perl"
] | 2 | 2019-03-25T01:36:06.000Z | 2021-09-27T04:05:08.000Z | # -*- perl -*-
#
# Copyright (C) 2004-2006 Daniel P. Berrange
#
# This program is free software; You can redistribute it and/or modify
# it under the same terms as Perl itself. Either:
#
# a) the GNU General Public License as published by the Free
# Software Foundation; either version 2, or (at your option) any
# later version,
#
# or
#
# b) the "Artistic License"
#
# The file "COPYING" distributed along with this file provides full
# details of the terms and conditions of the two licenses.
=pod
=head1 NAME
Net::DBus::Error - Error details for remote method invocation
=head1 SYNOPSIS
package Music::Player::UnknownFormat;
use base qw(Net::DBus::Error);
# Define an error type for unknown track encoding type
# for a music player service
sub new {
my $proto = shift;
my $class = ref($proto) || $proto;
my $self = $class->SUPER::new(name => "org.example.music.UnknownFormat",
message => "Unknown track encoding format");
}
package Music::Player::Engine;
...snip...
# Play either mp3 or ogg music tracks, otherwise
# thrown an error
sub play {
my $self = shift;
my $url = shift;
if ($url =~ /\.(mp3|ogg)$/) {
...play the track
} else {
die Music::Player::UnknownFormat->new();
}
}
=head1 DESCRIPTION
This objects provides for strongly typed error handling. Normally
a service would simply call
die "some message text"
When returning the error condition to the calling DBus client, the
message is associated with a generic error code or "org.freedesktop.DBus.Failed".
While this suffices for many applications, occasionally it is desirable
to be able to catch and handle specific error conditions. For such
scenarios the service should create subclasses of the C<Net::DBus::Error>
object providing in a custom error name. This error name is then sent back
to the client instead of the genreic "org.freedesktop.DBus.Failed" code.
=head1 METHODS
=over 4
=cut
package Net::DBus::Error;
use strict;
use warnings;
use overload ('""' => 'stringify');
=item my $error = Net::DBus::Error->new(name => $error_name,
message => $description);
Creates a new error object whose name is given by the C<name>
parameter, and long descriptive text is provided by the
C<message> parameter. The C<name> parameter has certain
formatting rules which must be adhered to. It must only contain
the letters 'a'-'Z', '0'-'9', '-', '_' and '.'. There must be
at least two components separated by a '.', For example a valid
name is 'org.example.Music.UnknownFormat'.
=cut
sub new {
my $proto = shift;
my $class = ref($proto) || $proto;
my $self = {};
my %params = @_;
$self->{name} = $params{name} ? $params{name} : die "name parameter is required";
$self->{message} = $params{message} ? $params{message} : die "message parameter is required";
bless $self, $class;
return $self;
}
=item $error->name
Returns the DBus error name associated with the object.
=cut
sub name {
my $self = shift;
return $self->{name};
}
=item $error->message
Returns the descriptive text/message associated with the
error condition.
=cut
sub message {
my $self = shift;
return $self->{message};
}
=item $error->stringify
Formats the error as a string in a manner suitable for
printing out / logging / displaying to the user, etc.
=cut
sub stringify {
my $self = shift;
return $self->{name} . ": " . $self->{message} . ($self->{message} =~ /\n$/ ? "" : "\n");
}
1;
=pod
=back
=head1 AUTHORS
Daniel P. Berrange
=head1 COPYRIGHT
Copyright (C) 2005-2006 Daniel P. Berrange
=head1 SEE ALSO
L<Net::DBus>, L<Net::DBus::Object>
=cut
| 21.959064 | 97 | 0.665779 |
ed6ed6bc410e66663bcb95591e379b5a0424db58 | 44,473 | pm | Perl | vendor/Image-ExifTool-11.80/lib/Image/ExifTool/Qualcomm.pm | zealot128-os/mini_exiftool_vendored | 433b686f16a2ca3fcbbb9770922f842cfd2df5b7 | [
"MIT"
] | 1 | 2020-01-27T18:05:11.000Z | 2020-01-27T18:05:11.000Z | vendor/Image-ExifTool-11.80/lib/Image/ExifTool/Qualcomm.pm | zealot128-os/mini_exiftool_vendored | 433b686f16a2ca3fcbbb9770922f842cfd2df5b7 | [
"MIT"
] | 15 | 2019-06-12T22:35:58.000Z | 2020-01-10T22:40:47.000Z | vendor/Image-ExifTool-11.80/lib/Image/ExifTool/Qualcomm.pm | zealot128-os/mini_exiftool_vendored | 433b686f16a2ca3fcbbb9770922f842cfd2df5b7 | [
"MIT"
] | 3 | 2019-05-23T09:46:31.000Z | 2020-05-11T21:03:20.000Z | #------------------------------------------------------------------------------
# File: Qualcomm.pm
#
# Description: Read Qualcomm APP7 meta information
#
# Revisions: 2012/02/14 - P. Harvey Created
#------------------------------------------------------------------------------
package Image::ExifTool::Qualcomm;
use strict;
use vars qw($VERSION);
use Image::ExifTool qw(:DataAccess :Utils);
$VERSION = '1.01';
sub ProcessQualcomm($$$);
sub MakeNameAndDesc($$);
# Qualcomm format codes (ref PH (NC))
my @qualcommFormat = (
'int8u', 'int8s', 'int16u', 'int16s',
'int32u', 'int32s', 'float', 'double',
);
# information found in JPEG APP7 Qualcomm Camera Attributes segment
%Image::ExifTool::Qualcomm::Main = (
PROCESS_PROC => \&ProcessQualcomm,
GROUPS => { 0 => 'MakerNotes', 2 => 'Camera' },
VARS => { NO_ID => 1, NO_LOOKUP => 1 }, # too long, too many, and too obscure
NOTES => q{
The tags below have been observed in the JPEG APP7 "Qualcomm Camera
Attributes" segment written by some cameras such as the HP iPAQ Voice
Messenger. ExifTool will extract any information found from this segment,
even if it is not listed in this table.
},
'aec_current_sensor_luma' => { },
'af_position' => { },
'aec_current_exp_index' => { },
'awb_sample_decision' => { },
'asf5_enable' => { },
'asf5_filter_mode' => { },
'asf5_exposure_index_1' => { },
'asf5_exposure_index_2' => { },
'asf5_max_exposure_index' => { },
'asf5_luma_filter[0]' => { },
'asf5_luma_filter[1]' => { },
'asf5_luma_filter[2]' => { },
'asf5_luma_filter[3]' => { },
'asf5_luma_filter[4]' => { },
'asf5_luma_filter[5]' => { },
'asf5_luma_filter[6]' => { },
'asf5_luma_filter[7]' => { },
'asf5_luma_filter[8]' => { },
'asf5_filter1_a11' => { },
'asf5_filter1_a12' => { },
'asf5_filter1_a13' => { },
'asf5_filter1_a14' => { },
'asf5_filter1_a15' => { },
'asf5_filter1_a21' => { },
'asf5_filter1_a22' => { },
'asf5_filter1_a23' => { },
'asf5_filter1_a24' => { },
'asf5_filter1_a25' => { },
'asf5_filter1_a31' => { },
'asf5_filter1_a32' => { },
'asf5_filter1_a33' => { },
'asf5_filter1_a34' => { },
'asf5_filter1_a35' => { },
'asf5_filter1_a41' => { },
'asf5_filter1_a42' => { },
'asf5_filter1_a43' => { },
'asf5_filter1_a44' => { },
'asf5_filter1_a45' => { },
'asf5_filter1_a51' => { },
'asf5_filter1_a52' => { },
'asf5_filter1_a53' => { },
'asf5_filter1_a54' => { },
'asf5_filter1_a55' => { },
'asf5_filter2_a11' => { },
'asf5_filter2_a12' => { },
'asf5_filter2_a13' => { },
'asf5_filter2_a14' => { },
'asf5_filter2_a15' => { },
'asf5_filter2_a21' => { },
'asf5_filter2_a22' => { },
'asf5_filter2_a23' => { },
'asf5_filter2_a24' => { },
'asf5_filter2_a25' => { },
'asf5_filter2_a31' => { },
'asf5_filter2_a32' => { },
'asf5_filter2_a33' => { },
'asf5_filter2_a34' => { },
'asf5_filter2_a35' => { },
'asf5_filter2_a41' => { },
'asf5_filter2_a42' => { },
'asf5_filter2_a43' => { },
'asf5_filter2_a44' => { },
'asf5_filter2_a45' => { },
'asf5_filter2_a51' => { },
'asf5_filter2_a52' => { },
'asf5_filter2_a53' => { },
'asf5_filter2_a54' => { },
'asf5_filter2_a55' => { },
'asf5_nrmize_factor1' => { },
'asf5_nrmize_factor2' => { },
'asf5_low_lo_thres' => { },
'asf5_low_up_thres' => { },
'asf5_low_shrp_deg_f1' => { },
'asf5_low_shrp_deg_f2' => { },
'asf5_low_smth_prcnt' => { },
'asf5_nrm_lo_thres' => { },
'asf5_nrm_up_thres' => { },
'asf5_nrm_shrp_deg_f1' => { },
'asf5_nrm_shrp_deg_f2' => { },
'asf5_nrm_smth_prcnt' => { },
'asf5_brt_lo_thres' => { },
'asf5_brt_up_thres' => { },
'asf5_brt_shrp_deg_f1' => { },
'asf5_brt_shrp_deg_f2' => { },
'asf5_brt_smth_percent' => { },
'asf3_enable' => { },
'asf3_edge_filter_a11' => { },
'asf3_edge_filter_a12' => { },
'asf3_edge_filter_a13' => { },
'asf3_edge_filter_a21' => { },
'asf3_edge_filter_a22' => { },
'asf3_edge_filter_a23' => { },
'asf3_edge_filter_a31' => { },
'asf3_edge_filter_a32' => { },
'asf3_edge_filter_a33' => { },
'asf3_noise_filter_a11' => { },
'asf3_noise_filter_a12' => { },
'asf3_noise_filter_a13' => { },
'asf3_noise_filter_a21' => { },
'asf3_noise_filter_a22' => { },
'asf3_noise_filter_a23' => { },
'asf3_noise_filter_a31' => { },
'asf3_noise_filter_a32' => { },
'asf3_noise_filter_a33' => { },
'asf3_lower_threshold' => { },
'asf3_upper_threshold' => { },
'asf3_edge_detect' => { },
'aec_enable' => { },
'aec_mode' => { },
'aec_aggressiveness' => { },
'aec_luma_target' => { },
'aec_luma_tolerance' => { },
'aec_indoor_idx' => { },
'aec_odoor_idx' => { },
'aec_exposure_index_adj_step' => { },
'aec_outdoor_gamma_index' => { },
'aec_vfe_luma' => { },
'aec_high_luma_region_threshold' => { },
'aec_snapshot_sensor_gain' => { },
'aec_snapshot_digital_gain' => { },
'aec_snapshot_line_count' => { },
'aec_snapshot_exposure_time_ms' => { },
'aec_outdoor_bright_enable' => { },
'aec_outdoor_bright_reduction' => { },
'aec_outdoor_bright_threshold_LO' => { },
'aec_outdoor_bright_threshold_HI' => { },
'aec_outdoor_bright_discarded' => { },
'aec_high_luma_region_count' => { },
'antibanding_enable' => { },
'anti_bading_pixel_clk' => { },
'anti_bading_pixel_clk_per_line' => { },
'afr_enable' => { },
'afr_faster_0_trigger' => { },
'afr_slower_0_trigger' => { },
'afr_faster_0_exp_mod' => { },
'afr_slower_0_exp_mod' => { },
'afr_faster_1_trigger' => { },
'afr_slower_1_trigger' => { },
'afr_faster_1_exp_mod' => { },
'afr_slower_1_exp_mod' => { },
'afr_faster_2_trigger' => { },
'afr_slower_2_trigger' => { },
'afr_faster_2_exp_mod' => { },
'afr_slower_2_exp_mod' => { },
'afr_faster_3_trigger' => { },
'afr_slower_3_trigger' => { },
'afr_faster_3_exp_mod' => { },
'afr_slower_3_exp_mod' => { },
'afr_faster_4_trigger' => { },
'afr_slower_4_trigger' => { },
'afr_faster_4_exp_mod' => { },
'afr_slower_4_exp_mod' => { },
'afr_possible_frame_cnt' => { },
'af_enable' => { },
'af_steps_near_far' => { },
'af_steps_near_infinity' => { },
'af_gross_step' => { },
'af_fine_step' => { },
'af_fine_srch_points' => { },
'af_process' => { },
'af_mode' => { },
'af_near_end' => { },
'af_boundary' => { },
'af_far_end' => { },
'af_collect_end_stat' => { },
'af_test_mode' => { },
'af_undershoot_protect' => { },
'af_reset_lens_after_snap' => { },
'clip_to_af_rato' => { },
'af_pos_def_macro' => { },
'af_pos_def_norm' => { },
'af_vfe_vert_offset' => { },
'af_vfe_horz_offset' => { },
'af_vfe_vert_height' => { },
'af_vfe_horz_width' => { },
'af_vfe_metric_max' => { },
'af_trace_positions[0]' => { },
'af_trace_positions[1]' => { },
'af_trace_positions[2]' => { },
'af_trace_positions[3]' => { },
'af_trace_positions[4]' => { },
'af_trace_positions[5]' => { },
'af_trace_positions[6]' => { },
'af_trace_positions[7]' => { },
'af_trace_positions[8]' => { },
'af_trace_positions[9]' => { },
'af_trace_positions[10]' => { },
'af_trace_positions[11]' => { },
'af_trace_positions[12]' => { },
'af_trace_positions[13]' => { },
'af_trace_positions[14]' => { },
'af_trace_positions[15]' => { },
'af_trace_positions[16]' => { },
'af_trace_positions[17]' => { },
'af_trace_positions[18]' => { },
'af_trace_positions[19]' => { },
'af_trace_positions[20]' => { },
'af_trace_positions[21]' => { },
'af_trace_positions[22]' => { },
'af_trace_positions[23]' => { },
'af_trace_positions[24]' => { },
'af_trace_positions[25]' => { },
'af_trace_positions[26]' => { },
'af_trace_positions[27]' => { },
'af_trace_positions[28]' => { },
'af_trace_positions[29]' => { },
'af_trace_positions[30]' => { },
'af_trace_positions[31]' => { },
'af_trace_positions[32]' => { },
'af_trace_positions[33]' => { },
'af_trace_positions[34]' => { },
'af_trace_positions[35]' => { },
'af_trace_positions[36]' => { },
'af_trace_positions[37]' => { },
'af_trace_positions[38]' => { },
'af_trace_positions[39]' => { },
'af_trace_positions[40]' => { },
'af_trace_positions[41]' => { },
'af_trace_positions[42]' => { },
'af_trace_positions[43]' => { },
'af_trace_positions[44]' => { },
'af_trace_positions[45]' => { },
'af_trace_positions[46]' => { },
'af_trace_positions[47]' => { },
'af_trace_positions[48]' => { },
'af_trace_positions[49]' => { },
'af_trace_stats[0]' => { },
'af_trace_stats[1]' => { },
'af_trace_stats[2]' => { },
'af_trace_stats[3]' => { },
'af_trace_stats[4]' => { },
'af_trace_stats[5]' => { },
'af_trace_stats[6]' => { },
'af_trace_stats[7]' => { },
'af_trace_stats[8]' => { },
'af_trace_stats[9]' => { },
'af_trace_stats[10]' => { },
'af_trace_stats[11]' => { },
'af_trace_stats[12]' => { },
'af_trace_stats[13]' => { },
'af_trace_stats[14]' => { },
'af_trace_stats[15]' => { },
'af_trace_stats[16]' => { },
'af_trace_stats[17]' => { },
'af_trace_stats[18]' => { },
'af_trace_stats[19]' => { },
'af_trace_stats[20]' => { },
'af_trace_stats[21]' => { },
'af_trace_stats[22]' => { },
'af_trace_stats[23]' => { },
'af_trace_stats[24]' => { },
'af_trace_stats[25]' => { },
'af_trace_stats[26]' => { },
'af_trace_stats[27]' => { },
'af_trace_stats[28]' => { },
'af_trace_stats[29]' => { },
'af_trace_stats[30]' => { },
'af_trace_stats[31]' => { },
'af_trace_stats[32]' => { },
'af_trace_stats[33]' => { },
'af_trace_stats[34]' => { },
'af_trace_stats[35]' => { },
'af_trace_stats[36]' => { },
'af_trace_stats[37]' => { },
'af_trace_stats[38]' => { },
'af_trace_stats[39]' => { },
'af_trace_stats[40]' => { },
'af_trace_stats[41]' => { },
'af_trace_stats[42]' => { },
'af_trace_stats[43]' => { },
'af_trace_stats[44]' => { },
'af_trace_stats[45]' => { },
'af_trace_stats[46]' => { },
'af_trace_stats[47]' => { },
'af_trace_stats[48]' => { },
'af_trace_stats[49]' => { },
'af_focus_time' => { },
'awb_enable' => { },
'awb_algorithm' => { },
'awb_aggressiveness' => { },
'awb_red_gain_ref1' => { },
'awb_blue_gain_ref1' => { },
'awb_red_gain_adj_ref1' => { },
'awb_blue_gain_adj_ref1' => { },
'awb_red_gain_ref2' => { },
'awb_blue_gain_ref2' => { },
'awb_red_gain_adj_ref2' => { },
'awb_blue_gain_adj_ref2' => { },
'awb_red_gain_ref3' => { },
'awb_blue_gain_ref3' => { },
'awb_red_gain_adj_ref3' => { },
'awb_blue_gain_adj_ref3' => { },
'awb_red_gain_ref4' => { },
'awb_blue_gain_ref4' => { },
'awb_red_gain_adj_ref4' => { },
'awb_blue_gain_adj_ref4' => { },
'awb_red_gain_ref5' => { },
'awb_blue_gain_ref5' => { },
'awb_red_gain_adj_ref5' => { },
'awb_blue_gain_adj_ref5' => { },
'awb_red_gain_ref6' => { },
'awb_blue_gain_ref6' => { },
'awb_red_gain_adj_ref6' => { },
'awb_blue_gain_adj_ref6' => { },
'awb_red_gain_ref7' => { },
'awb_blue_gain_ref7' => { },
'awb_red_gain_adj_ref7' => { },
'awb_blue_gain_adj_ref7' => { },
'awb_red_gain_ref8' => { },
'awb_blue_gain_ref8' => { },
'awb_red_gain_adj_ref8' => { },
'awb_blue_gain_adj_ref8' => { },
'awb_lo_vfe_max_y' => { },
'awb_lo_vfe_min_y' => { },
'awb_lo_vfe_m1' => { },
'awb_lo_vfe_m2' => { },
'awb_lo_vfe_m3' => { },
'awb_lo_vfe_m4' => { },
'awb_lo_vfe_c1' => { },
'awb_lo_vfe_c2' => { },
'awb_lo_vfe_c3' => { },
'awb_lo_vfe_c4' => { },
'awb_norm_vfe_max_y' => { },
'awb_norm_vfe_min_y' => { },
'awb_norm_vfe_m1' => { },
'awb_norm_vfe_m2' => { },
'awb_norm_vfe_m3' => { },
'awb_norm_vfe_m4' => { },
'awb_norm_vfe_c1' => { },
'awb_norm_vfe_c2' => { },
'awb_norm_vfe_c3' => { },
'awb_norm_vfe_c4' => { },
'awb_oudor_vfe_max_y' => { },
'awb_oudor_vfe_min_y' => { },
'awb_oudor_vfe_m1' => { },
'awb_oudor_vfe_m2' => { },
'awb_oudor_vfe_m3' => { },
'awb_oudor_vfe_m4' => { },
'awb_oudor_vfe_c1' => { },
'awb_oudor_vfe_c2' => { },
'awb_oudor_vfe_c3' => { },
'awb_oudor_vfe_c4' => { },
'awb_cc_bias' => { },
'awb_min_r_gain' => { },
'awb_min_g_gain' => { },
'awb_min_b_gain' => { },
'awb_max_r_gain' => { },
'awb_max_g_gain' => { },
'awb_max_b_gain' => { },
'awb_outdoor_sample_influence' => { },
'awb_indoor_sample_influence' => { },
'awb_low_lig_col_cor_ena' => { },
'awb_agw_grid_dist_2_thresh' => { },
'awb_ave_rg_ratio' => { },
'awb_ave_bg_ratio' => { },
'awb_compact_cluster_R2' => { },
'outlier_distance' => { },
'awb_green_offset_rg' => { },
'awb_green_offset_bg' => { },
'awb_prev_wb_rgain' => { },
'awb_prev_wb_ggain' => { },
'awb_prev_wb_bgain' => { },
'awb_snapshot_r_gain' => { },
'awb_snapshot_b_gain' => { },
'rolloff_enable' => { },
'r2_tl84_cx' => { },
'r2_tl84_cy' => { },
'r2_tl84_width' => { },
'r2_tl84_height' => { },
'r2_tl84_intervals' => { },
'r2_tl84_tbl[0]' => { },
'r2_tl84_tbl[1]' => { },
'r2_tl84_tbl[2]' => { },
'r2_tl84_tbl[3]' => { },
'r2_tl84_tbl[4]' => { },
'r2_tl84_tbl[5]' => { },
'r2_tl84_tbl[6]' => { },
'r2_tl84_tbl[7]' => { },
'r2_tl84_tbl[8]' => { },
'r2_tl84_tbl[9]' => { },
'r2_tl84_tbl[10]' => { },
'r2_tl84_tbl[11]' => { },
'r2_tl84_tbl[12]' => { },
'r2_tl84_tbl[13]' => { },
'r2_tl84_tbl[14]' => { },
'r2_tl84_tbl[15]' => { },
'r2_tl84_tbl[16]' => { },
'r2_tl84_tbl[17]' => { },
'r2_tl84_tbl[18]' => { },
'r2_tl84_tbl[19]' => { },
'r2_tl84_tbl[20]' => { },
'r2_tl84_tbl[21]' => { },
'r2_tl84_tbl[22]' => { },
'r2_tl84_tbl[23]' => { },
'r2_tl84_tbl[24]' => { },
'r2_tl84_tbl[25]' => { },
'r2_tl84_tbl[26]' => { },
'r2_tl84_tbl[27]' => { },
'r2_tl84_tbl[28]' => { },
'r2_tl84_tbl[29]' => { },
'r2_tl84_tbl[30]' => { },
'r2_tl84_tbl[31]' => { },
'r2_tl84_red_ctbl[0]' => { },
'r2_tl84_red_ctbl[1]' => { },
'r2_tl84_red_ctbl[2]' => { },
'r2_tl84_red_ctbl[3]' => { },
'r2_tl84_red_ctbl[4]' => { },
'r2_tl84_red_ctbl[5]' => { },
'r2_tl84_red_ctbl[6]' => { },
'r2_tl84_red_ctbl[7]' => { },
'r2_tl84_red_ctbl[8]' => { },
'r2_tl84_red_ctbl[9]' => { },
'r2_tl84_red_ctbl[10]' => { },
'r2_tl84_red_ctbl[11]' => { },
'r2_tl84_red_ctbl[12]' => { },
'r2_tl84_red_ctbl[13]' => { },
'r2_tl84_red_ctbl[14]' => { },
'r2_tl84_red_ctbl[15]' => { },
'r2_tl84_red_ctbl[16]' => { },
'r2_tl84_red_ctbl[17]' => { },
'r2_tl84_red_ctbl[18]' => { },
'r2_tl84_red_ctbl[19]' => { },
'r2_tl84_red_ctbl[20]' => { },
'r2_tl84_red_ctbl[21]' => { },
'r2_tl84_red_ctbl[22]' => { },
'r2_tl84_red_ctbl[23]' => { },
'r2_tl84_red_ctbl[24]' => { },
'r2_tl84_red_ctbl[25]' => { },
'r2_tl84_red_ctbl[26]' => { },
'r2_tl84_red_ctbl[27]' => { },
'r2_tl84_red_ctbl[28]' => { },
'r2_tl84_red_ctbl[29]' => { },
'r2_tl84_red_ctbl[30]' => { },
'r2_tl84_red_ctbl[31]' => { },
'r2_tl84_green_ctbl[0]' => { },
'r2_tl84_green_ctbl[1]' => { },
'r2_tl84_green_ctbl[2]' => { },
'r2_tl84_green_ctbl[3]' => { },
'r2_tl84_green_ctbl[4]' => { },
'r2_tl84_green_ctbl[5]' => { },
'r2_tl84_green_ctbl[6]' => { },
'r2_tl84_green_ctbl[7]' => { },
'r2_tl84_green_ctbl[8]' => { },
'r2_tl84_green_ctbl[9]' => { },
'r2_tl84_green_ctbl[10]' => { },
'r2_tl84_green_ctbl[11]' => { },
'r2_tl84_green_ctbl[12]' => { },
'r2_tl84_green_ctbl[13]' => { },
'r2_tl84_green_ctbl[14]' => { },
'r2_tl84_green_ctbl[15]' => { },
'r2_tl84_green_ctbl[16]' => { },
'r2_tl84_green_ctbl[17]' => { },
'r2_tl84_green_ctbl[18]' => { },
'r2_tl84_green_ctbl[19]' => { },
'r2_tl84_green_ctbl[20]' => { },
'r2_tl84_green_ctbl[21]' => { },
'r2_tl84_green_ctbl[22]' => { },
'r2_tl84_green_ctbl[23]' => { },
'r2_tl84_green_ctbl[24]' => { },
'r2_tl84_green_ctbl[25]' => { },
'r2_tl84_green_ctbl[26]' => { },
'r2_tl84_green_ctbl[27]' => { },
'r2_tl84_green_ctbl[28]' => { },
'r2_tl84_green_ctbl[29]' => { },
'r2_tl84_green_ctbl[30]' => { },
'r2_tl84_green_ctbl[31]' => { },
'r2_tl84_blue_ctbl[0]' => { },
'r2_tl84_blue_ctbl[1]' => { },
'r2_tl84_blue_ctbl[2]' => { },
'r2_tl84_blue_ctbl[3]' => { },
'r2_tl84_blue_ctbl[4]' => { },
'r2_tl84_blue_ctbl[5]' => { },
'r2_tl84_blue_ctbl[6]' => { },
'r2_tl84_blue_ctbl[7]' => { },
'r2_tl84_blue_ctbl[8]' => { },
'r2_tl84_blue_ctbl[9]' => { },
'r2_tl84_blue_ctbl[10]' => { },
'r2_tl84_blue_ctbl[11]' => { },
'r2_tl84_blue_ctbl[12]' => { },
'r2_tl84_blue_ctbl[13]' => { },
'r2_tl84_blue_ctbl[14]' => { },
'r2_tl84_blue_ctbl[15]' => { },
'r2_tl84_blue_ctbl[16]' => { },
'r2_tl84_blue_ctbl[17]' => { },
'r2_tl84_blue_ctbl[18]' => { },
'r2_tl84_blue_ctbl[19]' => { },
'r2_tl84_blue_ctbl[20]' => { },
'r2_tl84_blue_ctbl[21]' => { },
'r2_tl84_blue_ctbl[22]' => { },
'r2_tl84_blue_ctbl[23]' => { },
'r2_tl84_blue_ctbl[24]' => { },
'r2_tl84_blue_ctbl[25]' => { },
'r2_tl84_blue_ctbl[26]' => { },
'r2_tl84_blue_ctbl[27]' => { },
'r2_tl84_blue_ctbl[28]' => { },
'r2_tl84_blue_ctbl[29]' => { },
'r2_tl84_blue_ctbl[30]' => { },
'r2_tl84_blue_ctbl[31]' => { },
'r2_tl84_red_stbl[0]' => { },
'r2_tl84_red_stbl[1]' => { },
'r2_tl84_red_stbl[2]' => { },
'r2_tl84_red_stbl[3]' => { },
'r2_tl84_red_stbl[4]' => { },
'r2_tl84_red_stbl[5]' => { },
'r2_tl84_red_stbl[6]' => { },
'r2_tl84_red_stbl[7]' => { },
'r2_tl84_red_stbl[8]' => { },
'r2_tl84_red_stbl[9]' => { },
'r2_tl84_red_stbl[10]' => { },
'r2_tl84_red_stbl[11]' => { },
'r2_tl84_red_stbl[12]' => { },
'r2_tl84_red_stbl[13]' => { },
'r2_tl84_red_stbl[14]' => { },
'r2_tl84_red_stbl[15]' => { },
'r2_tl84_red_stbl[16]' => { },
'r2_tl84_red_stbl[17]' => { },
'r2_tl84_red_stbl[18]' => { },
'r2_tl84_red_stbl[19]' => { },
'r2_tl84_red_stbl[20]' => { },
'r2_tl84_red_stbl[21]' => { },
'r2_tl84_red_stbl[22]' => { },
'r2_tl84_red_stbl[23]' => { },
'r2_tl84_red_stbl[24]' => { },
'r2_tl84_red_stbl[25]' => { },
'r2_tl84_red_stbl[26]' => { },
'r2_tl84_red_stbl[27]' => { },
'r2_tl84_red_stbl[28]' => { },
'r2_tl84_red_stbl[29]' => { },
'r2_tl84_red_stbl[30]' => { },
'r2_tl84_red_stbl[31]' => { },
'r2_tl84_blue_stbl[0]' => { },
'r2_tl84_blue_stbl[1]' => { },
'r2_tl84_blue_stbl[2]' => { },
'r2_tl84_blue_stbl[3]' => { },
'r2_tl84_blue_stbl[4]' => { },
'r2_tl84_blue_stbl[5]' => { },
'r2_tl84_blue_stbl[6]' => { },
'r2_tl84_blue_stbl[7]' => { },
'r2_tl84_blue_stbl[8]' => { },
'r2_tl84_blue_stbl[9]' => { },
'r2_tl84_blue_stbl[10]' => { },
'r2_tl84_blue_stbl[11]' => { },
'r2_tl84_blue_stbl[12]' => { },
'r2_tl84_blue_stbl[13]' => { },
'r2_tl84_blue_stbl[14]' => { },
'r2_tl84_blue_stbl[15]' => { },
'r2_tl84_blue_stbl[16]' => { },
'r2_tl84_blue_stbl[17]' => { },
'r2_tl84_blue_stbl[18]' => { },
'r2_tl84_blue_stbl[19]' => { },
'r2_tl84_blue_stbl[20]' => { },
'r2_tl84_blue_stbl[21]' => { },
'r2_tl84_blue_stbl[22]' => { },
'r2_tl84_blue_stbl[23]' => { },
'r2_tl84_blue_stbl[24]' => { },
'r2_tl84_blue_stbl[25]' => { },
'r2_tl84_blue_stbl[26]' => { },
'r2_tl84_blue_stbl[27]' => { },
'r2_tl84_blue_stbl[28]' => { },
'r2_tl84_blue_stbl[29]' => { },
'r2_tl84_blue_stbl[30]' => { },
'r2_tl84_blue_stbl[31]' => { },
'r2_tl84_green_stbl[0]' => { },
'r2_tl84_green_stbl[1]' => { },
'r2_tl84_green_stbl[2]' => { },
'r2_tl84_green_stbl[3]' => { },
'r2_tl84_green_stbl[4]' => { },
'r2_tl84_green_stbl[5]' => { },
'r2_tl84_green_stbl[6]' => { },
'r2_tl84_green_stbl[7]' => { },
'r2_tl84_green_stbl[8]' => { },
'r2_tl84_green_stbl[9]' => { },
'r2_tl84_green_stbl[10]' => { },
'r2_tl84_green_stbl[11]' => { },
'r2_tl84_green_stbl[12]' => { },
'r2_tl84_green_stbl[13]' => { },
'r2_tl84_green_stbl[14]' => { },
'r2_tl84_green_stbl[15]' => { },
'r2_tl84_green_stbl[16]' => { },
'r2_tl84_green_stbl[17]' => { },
'r2_tl84_green_stbl[18]' => { },
'r2_tl84_green_stbl[19]' => { },
'r2_tl84_green_stbl[20]' => { },
'r2_tl84_green_stbl[21]' => { },
'r2_tl84_green_stbl[22]' => { },
'r2_tl84_green_stbl[23]' => { },
'r2_tl84_green_stbl[24]' => { },
'r2_tl84_green_stbl[25]' => { },
'r2_tl84_green_stbl[26]' => { },
'r2_tl84_green_stbl[27]' => { },
'r2_tl84_green_stbl[28]' => { },
'r2_tl84_green_stbl[29]' => { },
'r2_tl84_green_stbl[30]' => { },
'r2_tl84_green_stbl[31]' => { },
'r2_d65_cx' => { },
'r2_d65_cy' => { },
'r2_d65_width' => { },
'r2_d65_height' => { },
'r2_d65_intervals' => { },
'r2_d65_tbl[0]' => { },
'r2_d65_tbl[1]' => { },
'r2_d65_tbl[2]' => { },
'r2_d65_tbl[3]' => { },
'r2_d65_tbl[4]' => { },
'r2_d65_tbl[5]' => { },
'r2_d65_tbl[6]' => { },
'r2_d65_tbl[7]' => { },
'r2_d65_tbl[8]' => { },
'r2_d65_tbl[9]' => { },
'r2_d65_tbl[10]' => { },
'r2_d65_tbl[11]' => { },
'r2_d65_tbl[12]' => { },
'r2_d65_tbl[13]' => { },
'r2_d65_tbl[14]' => { },
'r2_d65_tbl[15]' => { },
'r2_d65_tbl[16]' => { },
'r2_d65_tbl[17]' => { },
'r2_d65_tbl[18]' => { },
'r2_d65_tbl[19]' => { },
'r2_d65_tbl[20]' => { },
'r2_d65_tbl[21]' => { },
'r2_d65_tbl[22]' => { },
'r2_d65_tbl[23]' => { },
'r2_d65_tbl[24]' => { },
'r2_d65_tbl[25]' => { },
'r2_d65_tbl[26]' => { },
'r2_d65_tbl[27]' => { },
'r2_d65_tbl[28]' => { },
'r2_d65_tbl[29]' => { },
'r2_d65_tbl[30]' => { },
'r2_d65_tbl[31]' => { },
'r2_d65_red_ctbl[0]' => { },
'r2_d65_red_ctbl[1]' => { },
'r2_d65_red_ctbl[2]' => { },
'r2_d65_red_ctbl[3]' => { },
'r2_d65_red_ctbl[4]' => { },
'r2_d65_red_ctbl[5]' => { },
'r2_d65_red_ctbl[6]' => { },
'r2_d65_red_ctbl[7]' => { },
'r2_d65_red_ctbl[8]' => { },
'r2_d65_red_ctbl[9]' => { },
'r2_d65_red_ctbl[10]' => { },
'r2_d65_red_ctbl[11]' => { },
'r2_d65_red_ctbl[12]' => { },
'r2_d65_red_ctbl[13]' => { },
'r2_d65_red_ctbl[14]' => { },
'r2_d65_red_ctbl[15]' => { },
'r2_d65_red_ctbl[16]' => { },
'r2_d65_red_ctbl[17]' => { },
'r2_d65_red_ctbl[18]' => { },
'r2_d65_red_ctbl[19]' => { },
'r2_d65_red_ctbl[20]' => { },
'r2_d65_red_ctbl[21]' => { },
'r2_d65_red_ctbl[22]' => { },
'r2_d65_red_ctbl[23]' => { },
'r2_d65_red_ctbl[24]' => { },
'r2_d65_red_ctbl[25]' => { },
'r2_d65_red_ctbl[26]' => { },
'r2_d65_red_ctbl[27]' => { },
'r2_d65_red_ctbl[28]' => { },
'r2_d65_red_ctbl[29]' => { },
'r2_d65_red_ctbl[30]' => { },
'r2_d65_red_ctbl[31]' => { },
'r2_d65_green_ctbl[0]' => { },
'r2_d65_green_ctbl[1]' => { },
'r2_d65_green_ctbl[2]' => { },
'r2_d65_green_ctbl[3]' => { },
'r2_d65_green_ctbl[4]' => { },
'r2_d65_green_ctbl[5]' => { },
'r2_d65_green_ctbl[6]' => { },
'r2_d65_green_ctbl[7]' => { },
'r2_d65_green_ctbl[8]' => { },
'r2_d65_green_ctbl[9]' => { },
'r2_d65_green_ctbl[10]' => { },
'r2_d65_green_ctbl[11]' => { },
'r2_d65_green_ctbl[12]' => { },
'r2_d65_green_ctbl[13]' => { },
'r2_d65_green_ctbl[14]' => { },
'r2_d65_green_ctbl[15]' => { },
'r2_d65_green_ctbl[16]' => { },
'r2_d65_green_ctbl[17]' => { },
'r2_d65_green_ctbl[18]' => { },
'r2_d65_green_ctbl[19]' => { },
'r2_d65_green_ctbl[20]' => { },
'r2_d65_green_ctbl[21]' => { },
'r2_d65_green_ctbl[22]' => { },
'r2_d65_green_ctbl[23]' => { },
'r2_d65_green_ctbl[24]' => { },
'r2_d65_green_ctbl[25]' => { },
'r2_d65_green_ctbl[26]' => { },
'r2_d65_green_ctbl[27]' => { },
'r2_d65_green_ctbl[28]' => { },
'r2_d65_green_ctbl[29]' => { },
'r2_d65_green_ctbl[30]' => { },
'r2_d65_green_ctbl[31]' => { },
'r2_d65_blue_ctbl[0]' => { },
'r2_d65_blue_ctbl[1]' => { },
'r2_d65_blue_ctbl[2]' => { },
'r2_d65_blue_ctbl[3]' => { },
'r2_d65_blue_ctbl[4]' => { },
'r2_d65_blue_ctbl[5]' => { },
'r2_d65_blue_ctbl[6]' => { },
'r2_d65_blue_ctbl[7]' => { },
'r2_d65_blue_ctbl[8]' => { },
'r2_d65_blue_ctbl[9]' => { },
'r2_d65_blue_ctbl[10]' => { },
'r2_d65_blue_ctbl[11]' => { },
'r2_d65_blue_ctbl[12]' => { },
'r2_d65_blue_ctbl[13]' => { },
'r2_d65_blue_ctbl[14]' => { },
'r2_d65_blue_ctbl[15]' => { },
'r2_d65_blue_ctbl[16]' => { },
'r2_d65_blue_ctbl[17]' => { },
'r2_d65_blue_ctbl[18]' => { },
'r2_d65_blue_ctbl[19]' => { },
'r2_d65_blue_ctbl[20]' => { },
'r2_d65_blue_ctbl[21]' => { },
'r2_d65_blue_ctbl[22]' => { },
'r2_d65_blue_ctbl[23]' => { },
'r2_d65_blue_ctbl[24]' => { },
'r2_d65_blue_ctbl[25]' => { },
'r2_d65_blue_ctbl[26]' => { },
'r2_d65_blue_ctbl[27]' => { },
'r2_d65_blue_ctbl[28]' => { },
'r2_d65_blue_ctbl[29]' => { },
'r2_d65_blue_ctbl[30]' => { },
'r2_d65_blue_ctbl[31]' => { },
'r2_d65_red_stbl[0]' => { },
'r2_d65_red_stbl[1]' => { },
'r2_d65_red_stbl[2]' => { },
'r2_d65_red_stbl[3]' => { },
'r2_d65_red_stbl[4]' => { },
'r2_d65_red_stbl[5]' => { },
'r2_d65_red_stbl[6]' => { },
'r2_d65_red_stbl[7]' => { },
'r2_d65_red_stbl[8]' => { },
'r2_d65_red_stbl[9]' => { },
'r2_d65_red_stbl[10]' => { },
'r2_d65_red_stbl[11]' => { },
'r2_d65_red_stbl[12]' => { },
'r2_d65_red_stbl[13]' => { },
'r2_d65_red_stbl[14]' => { },
'r2_d65_red_stbl[15]' => { },
'r2_d65_red_stbl[16]' => { },
'r2_d65_red_stbl[17]' => { },
'r2_d65_red_stbl[18]' => { },
'r2_d65_red_stbl[19]' => { },
'r2_d65_red_stbl[20]' => { },
'r2_d65_red_stbl[21]' => { },
'r2_d65_red_stbl[22]' => { },
'r2_d65_red_stbl[23]' => { },
'r2_d65_red_stbl[24]' => { },
'r2_d65_red_stbl[25]' => { },
'r2_d65_red_stbl[26]' => { },
'r2_d65_red_stbl[27]' => { },
'r2_d65_red_stbl[28]' => { },
'r2_d65_red_stbl[29]' => { },
'r2_d65_red_stbl[30]' => { },
'r2_d65_red_stbl[31]' => { },
'r2_d65_blue_stbl[0]' => { },
'r2_d65_blue_stbl[1]' => { },
'r2_d65_blue_stbl[2]' => { },
'r2_d65_blue_stbl[3]' => { },
'r2_d65_blue_stbl[4]' => { },
'r2_d65_blue_stbl[5]' => { },
'r2_d65_blue_stbl[6]' => { },
'r2_d65_blue_stbl[7]' => { },
'r2_d65_blue_stbl[8]' => { },
'r2_d65_blue_stbl[9]' => { },
'r2_d65_blue_stbl[10]' => { },
'r2_d65_blue_stbl[11]' => { },
'r2_d65_blue_stbl[12]' => { },
'r2_d65_blue_stbl[13]' => { },
'r2_d65_blue_stbl[14]' => { },
'r2_d65_blue_stbl[15]' => { },
'r2_d65_blue_stbl[16]' => { },
'r2_d65_blue_stbl[17]' => { },
'r2_d65_blue_stbl[18]' => { },
'r2_d65_blue_stbl[19]' => { },
'r2_d65_blue_stbl[20]' => { },
'r2_d65_blue_stbl[21]' => { },
'r2_d65_blue_stbl[22]' => { },
'r2_d65_blue_stbl[23]' => { },
'r2_d65_blue_stbl[24]' => { },
'r2_d65_blue_stbl[25]' => { },
'r2_d65_blue_stbl[26]' => { },
'r2_d65_blue_stbl[27]' => { },
'r2_d65_blue_stbl[28]' => { },
'r2_d65_blue_stbl[29]' => { },
'r2_d65_blue_stbl[30]' => { },
'r2_d65_blue_stbl[31]' => { },
'r2_d65_green_stbl[0]' => { },
'r2_d65_green_stbl[1]' => { },
'r2_d65_green_stbl[2]' => { },
'r2_d65_green_stbl[3]' => { },
'r2_d65_green_stbl[4]' => { },
'r2_d65_green_stbl[5]' => { },
'r2_d65_green_stbl[6]' => { },
'r2_d65_green_stbl[7]' => { },
'r2_d65_green_stbl[8]' => { },
'r2_d65_green_stbl[9]' => { },
'r2_d65_green_stbl[10]' => { },
'r2_d65_green_stbl[11]' => { },
'r2_d65_green_stbl[12]' => { },
'r2_d65_green_stbl[13]' => { },
'r2_d65_green_stbl[14]' => { },
'r2_d65_green_stbl[15]' => { },
'r2_d65_green_stbl[16]' => { },
'r2_d65_green_stbl[17]' => { },
'r2_d65_green_stbl[18]' => { },
'r2_d65_green_stbl[19]' => { },
'r2_d65_green_stbl[20]' => { },
'r2_d65_green_stbl[21]' => { },
'r2_d65_green_stbl[22]' => { },
'r2_d65_green_stbl[23]' => { },
'r2_d65_green_stbl[24]' => { },
'r2_d65_green_stbl[25]' => { },
'r2_d65_green_stbl[26]' => { },
'r2_d65_green_stbl[27]' => { },
'r2_d65_green_stbl[28]' => { },
'r2_d65_green_stbl[29]' => { },
'r2_d65_green_stbl[30]' => { },
'r2_d65_green_stbl[31]' => { },
'r2_a_cx' => { },
'r2_a_cy' => { },
'r2_a_width' => { },
'r2_a_height' => { },
'r2_a_intervals' => { },
'r2_a_tbl[0]' => { },
'r2_a_tbl[1]' => { },
'r2_a_tbl[2]' => { },
'r2_a_tbl[3]' => { },
'r2_a_tbl[4]' => { },
'r2_a_tbl[5]' => { },
'r2_a_tbl[6]' => { },
'r2_a_tbl[7]' => { },
'r2_a_tbl[8]' => { },
'r2_a_tbl[9]' => { },
'r2_a_tbl[10]' => { },
'r2_a_tbl[11]' => { },
'r2_a_tbl[12]' => { },
'r2_a_tbl[13]' => { },
'r2_a_tbl[14]' => { },
'r2_a_tbl[15]' => { },
'r2_a_tbl[16]' => { },
'r2_a_tbl[17]' => { },
'r2_a_tbl[18]' => { },
'r2_a_tbl[19]' => { },
'r2_a_tbl[20]' => { },
'r2_a_tbl[21]' => { },
'r2_a_tbl[22]' => { },
'r2_a_tbl[23]' => { },
'r2_a_tbl[24]' => { },
'r2_a_tbl[25]' => { },
'r2_a_tbl[26]' => { },
'r2_a_tbl[27]' => { },
'r2_a_tbl[28]' => { },
'r2_a_tbl[29]' => { },
'r2_a_tbl[30]' => { },
'r2_a_tbl[31]' => { },
'r2_a_red_ctbl[0]' => { },
'r2_a_red_ctbl[1]' => { },
'r2_a_red_ctbl[2]' => { },
'r2_a_red_ctbl[3]' => { },
'r2_a_red_ctbl[4]' => { },
'r2_a_red_ctbl[5]' => { },
'r2_a_red_ctbl[6]' => { },
'r2_a_red_ctbl[7]' => { },
'r2_a_red_ctbl[8]' => { },
'r2_a_red_ctbl[9]' => { },
'r2_a_red_ctbl[10]' => { },
'r2_a_red_ctbl[11]' => { },
'r2_a_red_ctbl[12]' => { },
'r2_a_red_ctbl[13]' => { },
'r2_a_red_ctbl[14]' => { },
'r2_a_red_ctbl[15]' => { },
'r2_a_red_ctbl[16]' => { },
'r2_a_red_ctbl[17]' => { },
'r2_a_red_ctbl[18]' => { },
'r2_a_red_ctbl[19]' => { },
'r2_a_red_ctbl[20]' => { },
'r2_a_red_ctbl[21]' => { },
'r2_a_red_ctbl[22]' => { },
'r2_a_red_ctbl[23]' => { },
'r2_a_red_ctbl[24]' => { },
'r2_a_red_ctbl[25]' => { },
'r2_a_red_ctbl[26]' => { },
'r2_a_red_ctbl[27]' => { },
'r2_a_red_ctbl[28]' => { },
'r2_a_red_ctbl[29]' => { },
'r2_a_red_ctbl[30]' => { },
'r2_a_red_ctbl[31]' => { },
'r2_a_green_ctbl[0]' => { },
'r2_a_green_ctbl[1]' => { },
'r2_a_green_ctbl[2]' => { },
'r2_a_green_ctbl[3]' => { },
'r2_a_green_ctbl[4]' => { },
'r2_a_green_ctbl[5]' => { },
'r2_a_green_ctbl[6]' => { },
'r2_a_green_ctbl[7]' => { },
'r2_a_green_ctbl[8]' => { },
'r2_a_green_ctbl[9]' => { },
'r2_a_green_ctbl[10]' => { },
'r2_a_green_ctbl[11]' => { },
'r2_a_green_ctbl[12]' => { },
'r2_a_green_ctbl[13]' => { },
'r2_a_green_ctbl[14]' => { },
'r2_a_green_ctbl[15]' => { },
'r2_a_green_ctbl[16]' => { },
'r2_a_green_ctbl[17]' => { },
'r2_a_green_ctbl[18]' => { },
'r2_a_green_ctbl[19]' => { },
'r2_a_green_ctbl[20]' => { },
'r2_a_green_ctbl[21]' => { },
'r2_a_green_ctbl[22]' => { },
'r2_a_green_ctbl[23]' => { },
'r2_a_green_ctbl[24]' => { },
'r2_a_green_ctbl[25]' => { },
'r2_a_green_ctbl[26]' => { },
'r2_a_green_ctbl[27]' => { },
'r2_a_green_ctbl[28]' => { },
'r2_a_green_ctbl[29]' => { },
'r2_a_green_ctbl[30]' => { },
'r2_a_green_ctbl[31]' => { },
'r2_a_blue_ctbl[0]' => { },
'r2_a_blue_ctbl[1]' => { },
'r2_a_blue_ctbl[2]' => { },
'r2_a_blue_ctbl[3]' => { },
'r2_a_blue_ctbl[4]' => { },
'r2_a_blue_ctbl[5]' => { },
'r2_a_blue_ctbl[6]' => { },
'r2_a_blue_ctbl[7]' => { },
'r2_a_blue_ctbl[8]' => { },
'r2_a_blue_ctbl[9]' => { },
'r2_a_blue_ctbl[10]' => { },
'r2_a_blue_ctbl[11]' => { },
'r2_a_blue_ctbl[12]' => { },
'r2_a_blue_ctbl[13]' => { },
'r2_a_blue_ctbl[14]' => { },
'r2_a_blue_ctbl[15]' => { },
'r2_a_blue_ctbl[16]' => { },
'r2_a_blue_ctbl[17]' => { },
'r2_a_blue_ctbl[18]' => { },
'r2_a_blue_ctbl[19]' => { },
'r2_a_blue_ctbl[20]' => { },
'r2_a_blue_ctbl[21]' => { },
'r2_a_blue_ctbl[22]' => { },
'r2_a_blue_ctbl[23]' => { },
'r2_a_blue_ctbl[24]' => { },
'r2_a_blue_ctbl[25]' => { },
'r2_a_blue_ctbl[26]' => { },
'r2_a_blue_ctbl[27]' => { },
'r2_a_blue_ctbl[28]' => { },
'r2_a_blue_ctbl[29]' => { },
'r2_a_blue_ctbl[30]' => { },
'r2_a_blue_ctbl[31]' => { },
'r2_a_red_stbl[0]' => { },
'r2_a_red_stbl[1]' => { },
'r2_a_red_stbl[2]' => { },
'r2_a_red_stbl[3]' => { },
'r2_a_red_stbl[4]' => { },
'r2_a_red_stbl[5]' => { },
'r2_a_red_stbl[6]' => { },
'r2_a_red_stbl[7]' => { },
'r2_a_red_stbl[8]' => { },
'r2_a_red_stbl[9]' => { },
'r2_a_red_stbl[10]' => { },
'r2_a_red_stbl[11]' => { },
'r2_a_red_stbl[12]' => { },
'r2_a_red_stbl[13]' => { },
'r2_a_red_stbl[14]' => { },
'r2_a_red_stbl[15]' => { },
'r2_a_red_stbl[16]' => { },
'r2_a_red_stbl[17]' => { },
'r2_a_red_stbl[18]' => { },
'r2_a_red_stbl[19]' => { },
'r2_a_red_stbl[20]' => { },
'r2_a_red_stbl[21]' => { },
'r2_a_red_stbl[22]' => { },
'r2_a_red_stbl[23]' => { },
'r2_a_red_stbl[24]' => { },
'r2_a_red_stbl[25]' => { },
'r2_a_red_stbl[26]' => { },
'r2_a_red_stbl[27]' => { },
'r2_a_red_stbl[28]' => { },
'r2_a_red_stbl[29]' => { },
'r2_a_red_stbl[30]' => { },
'r2_a_red_stbl[31]' => { },
'r2_a_blue_stbl[0]' => { },
'r2_a_blue_stbl[1]' => { },
'r2_a_blue_stbl[2]' => { },
'r2_a_blue_stbl[3]' => { },
'r2_a_blue_stbl[4]' => { },
'r2_a_blue_stbl[5]' => { },
'r2_a_blue_stbl[6]' => { },
'r2_a_blue_stbl[7]' => { },
'r2_a_blue_stbl[8]' => { },
'r2_a_blue_stbl[9]' => { },
'r2_a_blue_stbl[10]' => { },
'r2_a_blue_stbl[11]' => { },
'r2_a_blue_stbl[12]' => { },
'r2_a_blue_stbl[13]' => { },
'r2_a_blue_stbl[14]' => { },
'r2_a_blue_stbl[15]' => { },
'r2_a_blue_stbl[16]' => { },
'r2_a_blue_stbl[17]' => { },
'r2_a_blue_stbl[18]' => { },
'r2_a_blue_stbl[19]' => { },
'r2_a_blue_stbl[20]' => { },
'r2_a_blue_stbl[21]' => { },
'r2_a_blue_stbl[22]' => { },
'r2_a_blue_stbl[23]' => { },
'r2_a_blue_stbl[24]' => { },
'r2_a_blue_stbl[25]' => { },
'r2_a_blue_stbl[26]' => { },
'r2_a_blue_stbl[27]' => { },
'r2_a_blue_stbl[28]' => { },
'r2_a_blue_stbl[29]' => { },
'r2_a_blue_stbl[30]' => { },
'r2_a_blue_stbl[31]' => { },
'r2_a_green_stbl[0]' => { },
'r2_a_green_stbl[1]' => { },
'r2_a_green_stbl[2]' => { },
'r2_a_green_stbl[3]' => { },
'r2_a_green_stbl[4]' => { },
'r2_a_green_stbl[5]' => { },
'r2_a_green_stbl[6]' => { },
'r2_a_green_stbl[7]' => { },
'r2_a_green_stbl[8]' => { },
'r2_a_green_stbl[9]' => { },
'r2_a_green_stbl[10]' => { },
'r2_a_green_stbl[11]' => { },
'r2_a_green_stbl[12]' => { },
'r2_a_green_stbl[13]' => { },
'r2_a_green_stbl[14]' => { },
'r2_a_green_stbl[15]' => { },
'r2_a_green_stbl[16]' => { },
'r2_a_green_stbl[17]' => { },
'r2_a_green_stbl[18]' => { },
'r2_a_green_stbl[19]' => { },
'r2_a_green_stbl[20]' => { },
'r2_a_green_stbl[21]' => { },
'r2_a_green_stbl[22]' => { },
'r2_a_green_stbl[23]' => { },
'r2_a_green_stbl[24]' => { },
'r2_a_green_stbl[25]' => { },
'r2_a_green_stbl[26]' => { },
'r2_a_green_stbl[27]' => { },
'r2_a_green_stbl[28]' => { },
'r2_a_green_stbl[29]' => { },
'r2_a_green_stbl[30]' => { },
'r2_a_green_stbl[31]' => { },
'def_cor_c0' => { },
'def_cor_c1' => { },
'def_cor_c2' => { },
'def_cor_c3' => { },
'def_cor_c4' => { },
'def_cor_c5' => { },
'def_cor_c6' => { },
'def_cor_c7' => { },
'def_cor_c8' => { },
'def_cor_k0' => { },
'def_cor_k1' => { },
'def_cor_k2' => { },
'yhi_ylo_cor_c0' => { },
'yhi_ylo_cor_c1' => { },
'yhi_ylo_cor_c2' => { },
'yhi_ylo_cor_c3' => { },
'yhi_ylo_cor_c4' => { },
'yhi_ylo_cor_c5' => { },
'yhi_ylo_cor_c6' => { },
'yhi_ylo_cor_c7' => { },
'yhi_ylo_cor_c8' => { },
'yhi_ylo_cor_k0' => { },
'yhi_ylo_cor_k1' => { },
'yhi_ylo_cor_k2' => { },
'def_conv_chrm_a_m' => { },
'def_conv_chrm_a_p' => { },
'def_conv_chrm_b_m' => { },
'def_conv_chrm_b_p' => { },
'def_conv_chrm_c_m' => { },
'def_conv_chrm_c_p' => { },
'def_conv_chrm_d_m' => { },
'def_conv_chrm_d_p' => { },
'def_conv_chrm_k_cb' => { },
'def_conv_chrm_k_cr' => { },
'def_conv_luma_v0' => { },
'def_conv_luma_v1' => { },
'def_conv_luma_v2' => { },
'def_conv_luma_k' => { },
'tl84_conv_chrm_a_m' => { },
'tl84_conv_chrm_a_p' => { },
'tl84_conv_chrm_b_m' => { },
'tl84_conv_chrm_b_p' => { },
'tl84_conv_chrm_c_m' => { },
'tl84_conv_chrm_c_p' => { },
'tl84_conv_chrm_d_m' => { },
'tl84_conv_chrm_d_p' => { },
'tl84_conv_chrm_k_cb' => { },
'tl84_conv_chrm_k_cr' => { },
'tl84_conv_luma_v0' => { },
'tl84_conv_luma_v1' => { },
'tl84_conv_luma_v2' => { },
'tl84_conv_luma_k' => { },
'incand_conv_chrm_a_m' => { },
'incand_conv_chrm_a_p' => { },
'incand_conv_chrm_b_m' => { },
'incand_conv_chrm_b_p' => { },
'incand_conv_chrm_c_m' => { },
'incand_conv_chrm_c_p' => { },
'incand_conv_chrm_d_m' => { },
'incand_conv_chrm_d_p' => { },
'incand_conv_chrm_k_cb' => { },
'incand_conv_chrm_k_cr' => { },
'incand_conv_luma_v0' => { },
'incand_conv_luma_v1' => { },
'incand_conv_luma_v2' => { },
'incand_conv_luma_k' => { },
'daylt_conv_chrm_a_m' => { },
'daylt_conv_chrm_a_p' => { },
'daylt_conv_chrm_b_m' => { },
'daylt_conv_chrm_b_p' => { },
'daylt_conv_chrm_c_m' => { },
'daylt_conv_chrm_c_p' => { },
'daylt_conv_chrm_d_m' => { },
'daylt_conv_chrm_d_p' => { },
'daylt_conv_chrm_k_cb' => { },
'daylt_conv_chrm_k_cr' => { },
'daylt_conv_luma_v0' => { },
'daylt_conv_luma_v1' => { },
'daylt_conv_luma_v2' => { },
'daylt_conv_luma_k' => { },
'yhi_ylo_conv_chrm_a_m' => { },
'yhi_ylo_conv_chrm_a_p' => { },
'yhi_ylo_conv_chrm_b_m' => { },
'yhi_ylo_conv_chrm_b_p' => { },
'yhi_ylo_conv_chrm_c_m' => { },
'yhi_ylo_conv_chrm_c_p' => { },
'yhi_ylo_conv_chrm_d_m' => { },
'yhi_ylo_conv_chrm_d_p' => { },
'yhi_ylo_conv_chrm_k_cb' => { },
'yhi_ylo_conv_chrm_k_cr' => { },
'yhi_ylo_conv_luma_v0' => { },
'yhi_ylo_conv_luma_v1' => { },
'yhi_ylo_conv_luma_v2' => { },
'yhi_ylo_conv_luma_k' => { },
'gamma_enable' => { },
'def_luma_gamma_mode' => { },
'def_rgb_gamma_mode' => { },
'blck_lvl_even_cols' => { },
'blck_lvl_odd_cols' => { },
'defect_pix_min_thresh' => { },
'defect_pix_max_thresh' => { },
'defect_pix_cor_enable' => { },
'prview_resol' => { },
'snapshot_resol' => { },
'curr_resol' => { },
'sensor_fmt' => { },
'discard_frst_frm' => { },
'frm_skip_pttrn' => { },
'sensor_type' => { },
'max_video_fps' => { },
'video_fps' => { },
'max_prview_fps' => { },
'prview_fps' => { },
'nghtsht_fps' => { },
'sensr_ful_wdth' => { },
'sensr_ful_hght' => { },
'sensr_qtr_wdth' => { },
'sensr_qtr_hght' => { },
'nightshot_mode' => { },
'pclk_invert' => { },
'cam_mclk_hz' => { },
'chrom_supress' => { },
'chro_sup_luma_thres_1' => { },
'chro_sup_luma_thres_2' => { },
'chro_sup_luma_thres_3' => { },
'chro_sup_luma_thres_4' => { },
'chro_sup_chro_thres_1' => { },
'chro_sup_chro_thres_2' => { },
'la_detect' => { },
'la_enable' => { },
'HJR_enable' => { },
'HJR_max_num_frames' => { },
'HJR_one_to_two_offset' => { },
'HJR_n_reduction_flat' => { },
'HJR_n_reduction_texture' => { },
'HJR_texture_threshold' => { },
);
# generate tag names and descriptions
{
local $_;
my $table = \%Image::ExifTool::Qualcomm::Main;
MakeNameAndDesc($_, $$table{$_}) foreach TagTableKeys($table);
}
#------------------------------------------------------------------------------
# Generate tag Name and Description from a Qualcomm tag ID
# Inputs: 0) tag ID, 1) tagInfo ref
# Returns: true on success
sub MakeNameAndDesc($$)
{
local $_ = shift;
my $tagInfo = shift;
# capitalize various leading acronyms or just first letter...
s/^(asf|awb|aec|afr|af_|la_|r2_tl|tl)/\U$1/ or $_ = ucfirst;
s/_([a-z])/_\u$1/g; # capitalize first letter of each word
s/\[(\d+)\]$/sprintf("_%.2d",$1)/e; # use 2-digit subscripts (and remove brackets)
tr/-_a-zA-Z0-9//dc; # delete invalid characters
my $desc = $_;
# convert underlines to spaces in description
if ($desc =~ tr/_/ /) {
# remove unnecessary underlines from tag name...
s/_([A-Z][a-z])/$1/g;
s/([a-z0-9])_([A-Z])/$1$2/g;
s/([A-Za-z])_(\d)/$1$2/g;
}
return 0 unless length;
$$tagInfo{Name} = $_;
$$tagInfo{Description} = $desc;
return 1;
}
#------------------------------------------------------------------------------
# Process Qualcomm APP7 metadata (ref PH)
# Inputs: 0) ExifTool object ref, 1) dirInfo ref, 2) tag table ref
# Returns: 1 on success
sub ProcessQualcomm($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
my $dataPt = $$dirInfo{DataPt};
my $dataPos = $$dirInfo{DataPos};
my $pos = $$dirInfo{DirStart};
my $dirEnd = $pos + $$dirInfo{DirLen};
$et->VerboseDir('Qualcomm', undef, $$dirInfo{DirLen});
SetByteOrder('II');
while ($pos + 3 < $dirEnd) {
my $valLen = Get16u($dataPt, $pos);
my $tagLen = Get8u($dataPt, $pos + 2);
last if $pos + 8 + $tagLen + $valLen > $dirEnd;
my $tag = substr($$dataPt, $pos + 3, $tagLen);
$pos += 3 + $tagLen; # point to format byte
my $fmt = Get8u($dataPt, $pos);
# (not sure what these counts are for -- both are always 1 in my samples)
#my $cnt1 = Get16u($dataPt, $pos + 1);
#my $cnt2 = Get16u($dataPt, $pos + 3);
$pos += 5; # point to start of value data
my ($val, $format);
if ($fmt <= 7) {
$format = $qualcommFormat[$fmt];
$val = ReadValue($dataPt, $pos, $format, undef, $valLen);
} else {
$format = "format $fmt";
my $value = substr($$dataPt, $pos, $valLen);
$val = \$value;
}
unless (defined $$tagTablePtr{$tag} or $Image::ExifTool::specialTags{$tag}) {
my %tagInfo;
if (MakeNameAndDesc($tag, \%tagInfo)) {
$et->VPrint(0, $$et{INDENT}, "[adding Qualcomm:$tagInfo{Name}]\n");
AddTagToTable($tagTablePtr, $tag, \%tagInfo);
}
}
$et->HandleTag($tagTablePtr, $tag, $val,
DataPt => $dataPt,
DataPos => $dataPos,
Start => $pos,
Size => $valLen,
Format => $format,
);
$pos += $valLen; # point to start of next entry
}
return 1;
}
1; # end
__END__
=head1 NAME
Image::ExifTool::Qualcomm - Read Qualcomm APP7 meta information
=head1 SYNOPSIS
This module is loaded automatically by Image::ExifTool when required.
=head1 DESCRIPTION
This module contains definitions required by Image::ExifTool to read
information from the APP7 Qualcomm segment in JPEG images.
=head1 AUTHOR
Copyright 2003-2019, Phil Harvey (phil at owl.phy.queensu.ca)
This library is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
=head1 SEE ALSO
L<Image::ExifTool::TagNames/Qualcomm Tags>,
L<Image::ExifTool(3pm)|Image::ExifTool>
=cut
| 33.065428 | 86 | 0.513525 |
73db0789c08e5472eeb1c030c233f1afdaff42ec | 53,702 | pm | Perl | libs/perllib/LoxBerry/Log.pm | nufke/Loxberry | 58a7dc49bd6af9cbe44c431e05b46ad7bdaaa77d | [
"Apache-2.0"
] | null | null | null | libs/perllib/LoxBerry/Log.pm | nufke/Loxberry | 58a7dc49bd6af9cbe44c431e05b46ad7bdaaa77d | [
"Apache-2.0"
] | null | null | null | libs/perllib/LoxBerry/Log.pm | nufke/Loxberry | 58a7dc49bd6af9cbe44c431e05b46ad7bdaaa77d | [
"Apache-2.0"
] | null | null | null | # Please increment version number on EVERY change
# Major.Minor represents LoxBerry version (e.g. 0.3.1.12 = LoxBerry V0.3.1 the 12th change)
use strict;
use Carp;
use LoxBerry::System;
# use Time::Piece;
# use HTML::Entities;
# use JSON;
# use File::Basename;
# use File::Path;
################################################################
package LoxBerry::Log;
our $VERSION = "2.4.0.1";
our $DEBUG;
# This object is the object the exported LOG* functions use
our $mainobj;
our $packagedb;
#
my $packagedbfile = "$LoxBerry::System::lbsdatadir/logpackages.json";
our %severitylist = (
0 => 'EMERGE',
1 => 'ALERT',
2 => 'CRITICAL',
3 => 'ERROR',
4 => 'WARNING',
5 => 'OK',
6 => 'INFO',
7 => 'DEBUG' );
### Exports ###
use base 'Exporter';
our @EXPORT = qw (
notify
notify_ext
delete_notifications
delete_notification_key
get_notification_count
get_notifications
parsedatestring
);
# Variables
my $notifymailerror;
my @db_attribute_exclude_list = qw ( package name LOGSTART LOGEND LASTMODIFIED filename dbh _FH dbkey loxberry_uid loxberry_gid _plugindb_timestamp);
################################################################
## Constructor
## Params [square brackets mean optional]
## See https://www.loxwiki.eu/x/pQHgAQ
##################################################################
sub new
{
my $class = shift;
# print STDERR "Class: $class\n";
if (@_ % 2) {
Carp::croak "Illegal parameter list has odd number of values\n" . join("\n", @_) . "\n";
}
my %params = @_;
my $self = {
name => $params{name},
filename => $params{filename},
logdir => $params{logdir},
append => $params{append},
package => $params{package},
loglevel => $params{loglevel},
stderr => $params{stderr},
stdout => $params{stdout},
nofile => $params{nofile},
autoraise => $params{nofile},
addtime => $params{addtime},
dbkey => $params{dbkey},
nosession => $params{nosession},
};
bless $self, $class;
if ($self->{autoraise} eq "") {
$self->{autoraise} = 1;
}
if ( LoxBerry::System::is_enabled($self->{nosession}) ) {
$self->{append} = 1;
}
# If nofile is given, we don't need to do any smart things
if(!$self->{nofile}) {
# If a dbkey was given, recreate logging session
if($params{dbkey}) {
my $recreatestate = $self->log_db_recreate_session_by_id();
return undef if (!$recreatestate);
$self->{append} = 1;
}
# Setting package
# print STDERR "Package: " . $self->{package} . "\n";
if (!$self->{package}) {
if ($LoxBerry::System::lbpplugindir) {
$self->{package} = $LoxBerry::System::lbpplugindir;
}
if (!$self->{package}) {
Carp::croak "A 'package' must be defined if this log is not from a plugin";
}
}
if (!$self->{logdir} && !$self->{filename} && -e $LoxBerry::System::lbplogdir) {
$self->{logdir} = $LoxBerry::System::lbplogdir;
}
# print STDERR "2. logdir: " . $self->{logdir} . " filename: " . $self->{filename} . "\n";
if ($self->{logdir} && !$self->{filename}) {
$self->{filename} = $self->{logdir} . "/" . LoxBerry::System::currtime('filehires') . "_" . $self->{name} . ".log";
# print STDERR "3. logdir: " . $self->{logdir} . " filename: " . $self->{filename} . "\n";
} elsif (!$self->{filename}) {
# print STDERR "4. logdir: " . $self->{logdir} . " filename: " . $self->{filename} . "\n";
if ($LoxBerry::System::lbplogdir && -e $LoxBerry::System::lbplogdir) {
$self->{filename} = "$LoxBerry::System::lbplogdir/" . currtime('filehires') . "_" . $self->{name} . ".log";
# print STDERR "5. logdir: " . $self->{logdir} . " filename: " . $self->{filename} . "\n";
} else {
Carp::croak "Cannot determine plugin log directory";
}
}
if (!$self->{filename}) {
Carp::croak "Cannot smartly detect where your logfile should be placed. Check your parameters.";
}
}
# Get loglevel
# print STDERR "Log.pm: Loglevel is " . $self->{loglevel} . "\n";
if (!defined $self->{loglevel}) {
my $plugindata = LoxBerry::System::plugindata($self->{package});
if ($plugindata and defined $plugindata->{PLUGINDB_LOGLEVEL}) {
$self->{loglevel} = $plugindata->{'PLUGINDB_LOGLEVEL'};
} else {
$self->{loglevel} = 7;
$self->{loglevel_is_static} = 1;
}
} else {
$self->{loglevel_is_static} = 1;
}
# print STDERR "Log.pm: Loglevel is " . $self->{loglevel} . "\n";
# print STDERR "filename: " . $self->{filename} . "\n";
if (!$self->{append} and !$self->{nofile}) {
unlink $self->{filename};
require File::Basename;
my $dir = File::Basename::dirname($self->{filename});
if (! -d $dir) {
require File::Path;
File::Path::make_path($dir);
eval {
if(!$self->{loxberry_uid}) {
my (undef,undef,$uid,$gid) = getpwnam('loxberry');
$self->{loxberry_uid} = $uid;
$self->{loxberry_uid} = $gid;
}
chown $self->{loxberry_uid}, $self->{loxberry_uid}, $dir;
};
}
}
if (!$LoxBerry::Log::mainobj) {
$LoxBerry::Log::mainobj = $self;
}
# SQLite init
if( LoxBerry::System::is_enabled($params{nosession}) ) {
$self->{dbh} = log_db_init_database();
$self->{dbkey} = $self->log_db_get_session_by_filename();
} elsif($self->{append} && !$self->{nofile}) {
$self->{dbh} = log_db_init_database();
$self->{dbkey} = log_db_query_id($self->{dbh}, $self);
# print STDERR "Appending to file $self->{filename} with key $self->{dbkey}\n";
}
return $self;
}
sub loglevel
{
my $self = shift;
my $loglevel = shift;
if (defined $loglevel && $loglevel >= 0 && $loglevel <= 7) {
$self->{loglevel} = $loglevel;
$self->{loglevel_is_static} = 1;
}
return $self->{loglevel};
}
sub autoraise
{
my $self = shift;
my $param = shift;
if ($param == 0) {
undef $self->{autoraise};
} elsif ($param == 1) {
$self->{autoraise} = 1;
}
return $self->{autoraise};
}
# Legacy for LB <1.2.5
sub filehandle
{
my $self = shift;
if ($self->{'_FH'}) {
return $self->{'_FH'};
} else {
$self->open();
return $self->{'_FH'};
}
}
sub filename
{
my $self = shift;
if ($self->{filename}) {
return $self->{filename};
}
}
sub dbkey
{
my $self = shift;
if ($self->{dbkey}) {
return $self->{dbkey};
}
}
sub open
{
my $self = shift;
my $writetype = ">>";
my $fh;
eval {
open($fh, $writetype, $self->{filename});
$self->{'_FH'} = $fh if($fh);
};
if ($@) {
print STDERR "Cannot open logfile " . $self->{filename} . " (writetype " . $writetype . "): $@";
return;
}
eval {
if(!$self->{loxberry_uid}) {
my (undef,undef,$uid,$gid) = getpwnam('loxberry');
$self->{loxberry_uid} = $uid;
$self->{loxberry_uid} = $gid;
}
chown $self->{loxberry_uid}, $self->{loxberry_uid}, $fh;
chmod 0666, $fh;
};
}
sub close
{
my $self = shift;
close $self->{'_FH'} if $self->{'_FH'};
undef $self->{'_FH'};
return $self->{filename};
}
sub addtime
{
my $self = shift;
my $param = shift;
if ($param == 0) {
undef $self->{addtime};
} elsif ($param == 1) {
$self->{addtime} = 1;
}
return $self->{addtime};
}
sub logtitle
{
my $self = shift;
my $title = shift;
if ($title) {
$self->{LOGSTARTMESSAGE} = $title;
if (!$self->{nofile} and $self->{dbkey} and $self->{dbh}) {
eval {
my $dbh = $self->{dbh};
$dbh->do("UPDATE logs_attr SET value = '$self->{LOGSTARTMESSAGE}' WHERE keyref = $self->{dbkey} AND attrib = 'LOGSTARTMESSAGE';");
};
}
}
return $self->{LOGSTARTMESSAGE};
}
##########################################################
# Functions to enable strerr and stdout, and
# disable file writing (nofile)
##########################################################
sub stderr
{
my $self = shift;
my $param = shift;
if ($param == 0) {
undef $self->{stderr};
} elsif ($param == 1) {
$self->{stderr} = 1;
}
return $self->{stderr};
}
sub stdout
{
my $self = shift;
my $param = shift;
if ($param == 0) {
undef $self->{stdout};
} elsif ($param == 1) {
$self->{stdout} = 1;
}
return $self->{stdout};
}
sub nofile
{
my $self = shift;
my $param = shift;
if ($param == 0) {
undef $self->{nofile};
} elsif ($param == 1) {
$self->{nofile} = 1;
}
return $self->{nofile};
}
##################################################################################
# Writing to logfile function
##################################################################################
sub write
{
my $self = shift;
my $severity = shift;
my ($s)=@_;
# Check if the database entry is still present
if (!$self->{_next_db_check} or time > $self->{_next_db_check}) {
print STDERR "write: DB session check called\n" if ($DEBUG);
if(!$self->{dbh}) {
$self->{dbh} = log_db_init_database();
}
log_db_recreate_session($self->{dbh}, $self);
$self->{_next_db_check} = time+120;
}
# print STDERR "Severity: $severity / Loglevel: " . $self->{loglevel} . "\n";
# print STDERR "Log: $s\n";
# Do not log if loglevel is lower than severity
# print STDERR "--> write \n";
# print STDERR " autoraise\n";
# Change loglevel if it was changed in the UI aka PluginDB
if( !$self->{loglevel_is_static} and LoxBerry::System::plugindb_changed_time() != $self->{_plugindb_timestamp} ) {
$self->{_plugindb_timestamp} = LoxBerry::System::plugindb_changed_time();
my $newloglevel = LoxBerry::System::pluginloglevel($self->{package});
if ( defined $newloglevel and $newloglevel >= 0 and $newloglevel <=7 and $newloglevel != $self->{loglevel} ) {
my $oldloglevel = $self->{loglevel};
$self->{loglevel} = $newloglevel;
$self->write(-1, "<INFO> User changed loglevel from $oldloglevel to $newloglevel");
}
}
if ($severity <= 2 && $severity >= 0 && $self->{loglevel} < 6 && $self->{autoraise} == 1) {
# print STDERR " autoraise to loglevel 6\n";
$self->{loglevel} = 6;
$self->{loglevel_is_static} = 1;
}
if ((!defined($self->{STATUS}) or $severity < $self->{STATUS}) and $severity >= 0) {
# Remember highest severity sent
$self->{STATUS} = "$severity";
}
if($severity >= 0 and $severity <= 4) {
# Store all warnings, errors, etc. in a string
$self->{ATTENTIONMESSAGES} .= "\n" if ($self->{ATTENTIONMESSAGES});
$self->{ATTENTIONMESSAGES} .= '<' . $severitylist{$severity} . '> ' . $s;
# Truncate ATTENTIONMESSAGES
my $strmaxlen = 6000;
if( length($self->{ATTENTIONMESSAGES}) > $strmaxlen ) {
$self->{ATTENTIONMESSAGES} = substr( $self->{ATTENTIONMESSAGES}, -$strmaxlen+200 );
$self->{ATTENTIONMESSAGES} = substr( $self->{ATTENTIONMESSAGES}, index( $self->{ATTENTIONMESSAGES}, "\n" )+1 );
}
}
if ($self->{loglevel} != 0 and $severity <= $self->{loglevel} or $severity < 0) {
#print STDERR "Not filtered.\n";
if(!$self->{'_FH'}) {
$self->open();
}
my $fh = $self->{'_FH'};
my $string;
my $currtime = "";
if ($self->{addtime} and $severity > -2) {
$currtime = LoxBerry::System::currtime('hrtimehires') . " ";
}
if ($severity == 7 or $severity < 0) {
$string = $currtime . $s . "\n";
} else {
$string = $currtime . '<' . $severitylist{$severity} . '> ' . $s . "\n";
}
if (!$self->{nofile} && $self->{loglevel} != 0) {
# print STDERR " Print to file\n";
print $fh $string if($fh);
}
if ($self->{stderr}) {
print STDERR $string;
}
if ($self->{stdout}) {
print STDOUT $string;
}
} else {
# print STDERR "Filtered: $s\n";
}
}
#################################################################################
# The severity functions
#################################################################################
# 0 => 'EMERGE', 1 => 'ALERT', 2 => 'CRITICAL', 3 => 'ERROR', 4 => 'WARNING', 5 => 'OK', 6 => 'INFO', 7 => 'DEBUG'
sub DEB
{
my $self = shift;
my ($s)=@_;
$self->write(7, $s);
$self->close();
}
sub INF
{
my $self = shift;
my ($s)=@_;
$self->write(6, $s);
$self->close();
}
sub OK
{
my $self = shift;
my ($s)=@_;
$self->write(5, $s);
$self->close();
}
sub WARN
{
my $self = shift;
my ($s)=@_;
$self->write(4, $s);
$self->close();
}
sub ERR
{
my $self = shift;
my ($s)=@_;
$self->write(3, $s);
$self->close();
}
sub CRIT
{
my $self = shift;
my ($s)=@_;
$self->write(2, $s);
$self->close();
}
sub ALERT
{
my $self = shift;
my ($s)=@_;
$self->write(1, $s);
$self->close();
}
sub EMERGE
{
my $self = shift;
my ($s)=@_;
$self->write(0, $s);
$self->close();
}
sub LOGSTART
{
my $self = shift;
my ($s)=@_;
# If nosession is given, only an initial header is written
if( !LoxBerry::System::is_enabled($self->{nosession}) ) {
# print STDERR "Logstart -->\n";
$self->{LOGSTARTBYTE} = -e $self->{filename} ? -s $self->{filename} : 0;
$self->write(-2, "================================================================================");
$self->write(-2, "<LOGSTART> " . LoxBerry::System::currtime . " TASK STARTED");
$self->write(-2, "<LOGSTART> " . $s);
}
$self->{LOGSTARTMESSAGE} = $s if ($s);
opendir(my $DIR, "$LoxBerry::System::lbsconfigdir/");
my @is_files = grep(/is\_.*\.cfg/,readdir($DIR));
closedir($DIR);
my $is_file_str = "";
foreach my $is_file (@is_files) {
$is_file_str .= substr($is_file, rindex($is_file, '/')+1) . " ";
}
if ($is_file_str) {
$is_file_str = "( " . $is_file_str . ")";
}
my $plugin = LoxBerry::System::plugindata($self->{package});
if( !LoxBerry::System::is_enabled($self->{nosession}) or ! -e $self->{filename} ) {
$self->write(-1, "<INFO> LoxBerry Version " . LoxBerry::System::lbversion() . " " . $is_file_str);
$self->write(-1, "<INFO> " . $plugin->{PLUGINDB_TITLE} . " Version " . $plugin->{PLUGINDB_VERSION} ) if ($plugin);
$self->write(-1, "<INFO> Loglevel: " . $self->{loglevel});
}
if( LoxBerry::System::is_enabled($self->{nosession})) {
# $self->write(-2, "<INFO> " . $s);
$self->OK($s);
}
if(! $self->{nofile}) {
if(!$self->{dbh}) {
$self->{dbh} = log_db_init_database();
}
if ( !LoxBerry::System::is_enabled($self->{nosession}) ) {
$self->{dbkey} = log_db_logstart($self->{dbh}, $self);
}
}
$self->close();
}
sub LOGEND
{
my $self = shift;
my ($s)=@_;
if( !LoxBerry::System::is_enabled($self->{nosession}) ) {
$self->write(-2, "<LOGEND> " . $s) if $s;
$self->write(-2, "<LOGEND> " . LoxBerry::System::currtime . " TASK FINISHED");
}
$self->{LOGENDMESSAGE} = $s if ($s);
if(!defined($self->{STATUS})) {
# If no status was collected, let's say it's ok
$self->{STATUS} = 5;
}
if(! $self->{nofile}) {
if(!$self->{dbh}) {
$self->{dbh} = log_db_init_database();
}
if(!$self->{dbkey}) {
$self->{dbkey} = log_db_query_id($self->{dbh}, $self);
}
log_db_logend($self->{dbh}, $self);
}
$self->{logend_called} = 1;
$self->DESTROY();
}
## Sets this log object the default object
sub default
{
my $self = shift;
$LoxBerry::Log::mainobj = $self;
}
# our $AUTOLOAD;
# sub AUTOLOAD {
# my $self = shift;
# # Remove qualifier from original method name...
# my $called = $AUTOLOAD =~ s/.*:://r;
# # Is there an attribute of that name?
# Carp::carp "No such attribute: $called"
# unless exists $self->{$called};
# # If so, return it...
# return $self->{$called};
# }
sub DESTROY {
my $self = shift;
if ($self->{"_FH"}) {
CORE::close $self->{"_FH"};
}
if ($LoxBerry::Log::mainobj == $self) {
# Reset default object
undef $LoxBerry::Log::mainobj;
};
if(!$self->{nofile} and $self->{dbkey}
and defined $self->{STATUS} and !$self->{logend_called}) {
if(!$self->{dbh}) {
$self->{dbh} = log_db_init_database();
}
my $dbh = $self->{dbh};
$dbh->do("INSERT OR REPLACE INTO logs_attr (keyref, attrib, value) VALUES (" . $self->{dbkey} . ", 'STATUS', '" . $self->{STATUS} . "');COMMIT;") if ($dbh);
}
}
################################################
# Database function for logging
################################################
# INTERNAL FUNCTIONS
sub log_db_init_database
{
require DBI;
print STDERR "log_db_init_database\n" if ($DEBUG);
my $dbfile = $LoxBerry::System::lbhomedir . "/log/system_tmpfs/logs_sqlite.dat";
my $dbh;
my $dores;
my $dbok = 1;
my $dbierr;
my $dbierrstr;
for (my $i=1; $i <= 2; $i++) {
$dbierr = undef;
$dbierrstr = undef;
$dbh = DBI->connect("dbi:SQLite:dbname=$dbfile","","") or
do {
print STDERR "log_init_database connect: $DBI::errstr\n";
return undef;
};
#$dbh->{sqlite_unicode} = 1;
$dbh->do('PRAGMA journal_mode = wal;');
$dbh->do('PRAGMA busy_timeout = 5000;');
$dbh->do('BEGIN;');
$dbh->do("CREATE TABLE IF NOT EXISTS logs (
PACKAGE VARCHAR(255) NOT NULL,
NAME VARCHAR(255) NOT NULL,
FILENAME VARCHAR (2048) NOT NULL,
LOGSTART DATETIME,
LOGEND DATETIME,
LASTMODIFIED DATETIME NOT NULL,
LOGKEY INTEGER PRIMARY KEY
)") or
do {
print STDERR "log_init_database create table notifications: $DBI::errstr\n";
$dbierr = $DBI::err;
$dbierrstr = $DBI::errstr;
$dbh->do('ROLLBACK;');
$dbok = 0;
log_db_repair($dbfile, $dbh, $dbierr);
};
$dbh->do("CREATE TABLE IF NOT EXISTS logs_attr (
keyref INTEGER NOT NULL,
attrib VARCHAR(255) NOT NULL,
value VARCHAR(255),
PRIMARY KEY ( keyref, attrib )
)") or
do {
print STDERR "log_db_init_database create table logs_attr: $DBI::errstr\n";
$dbierr = $DBI::err;
$dbierrstr = $DBI::errstr;
$dbh->do('ROLLBACK;');
$dbok = 0;
log_db_repair($dbfile, $dbh, $dbierr);
};
$dbh->do('COMMIT;');
if ($dbok) {
last;
}
}
if(!$dbok) {
print STDERR "log_db_init_database: FAILED TO RECOVER DATABASE (Database error $dbierr - $dbierrstr)\n";
# LoxBerry::Log::notify( "logmanager", "Log Database", "The logfile database sends an error and cannot automatically be recovered. Please inform the LoxBerry-Core team about this error:\nError $dbierr ($dbierrstr)", 'error');
return undef;
}
eval {
my $uid = (stat $dbfile)[4];
my $owner = (getpwuid $uid)[0];
if ($owner ne 'loxberry') {
my ($login,$pass,$uid,$gid) = getpwnam('loxberry');
chown $uid, $gid, $dbfile;
}
};
return $dbh;
}
sub log_db_repair
{
my ($dbfile, $dbh, $dbierror) = @_;
print STDERR "log_db_repair: Repairing DB (Error $dbierror)\n";
# https://www.sqlite.org/c3ref/c_abort.html
# 11 - The database disk image is malformed
if ($dbierror eq "11") {
print STDERR "logdb seems to be corrupted - deleting and recreating...\n";
$dbh->disconnect();
unlink $dbfile;
$dbh = DBI->connect("dbi:SQLite:dbname=$dbfile","","") or
do {
$dbh->disconnect() if ($dbh);
return undef;
};
} else {
$dbh->disconnect() if ($dbh);
return undef;
}
}
sub log_db_query_id
{
my $dbh = shift;
my %p = %{shift()};
# Check mandatory fields
Carp::cluck "log_db_query_id: No FILENAME defined\n" if (!$p{filename});
Carp::cluck "Create DB log entry: DBH not defined\n" if (!$dbh);
if (!$p{filename} or !$dbh) {
return;
}
# Search filename
my $qu = "SELECT LOGKEY FROM logs WHERE FILENAME LIKE '$p{filename}' ORDER BY LOGSTART DESC LIMIT 1;";
my ($logid) = $dbh->selectrow_array($qu) or
do {
Carp::carp "log_db_query_id: No database entry found for given filename $p{filename}. File will be created.\n";
return undef;
};
if ($logid) {
return $logid;
} else {
print STDERR "log_db_query_id: Could not find filename $p{filename}\n" if ($DEBUG);
}
return;
}
sub log_db_logstart
{
my $dbh = shift;
my %p = %{shift()};
# print STDERR "Package: " . $p{'package'} . "\n";
# Check mandatory fields
Carp::cluck "Create DB log entry: DBH not defined\n" if (!$dbh);
Carp::cluck "Create DB log entry: No PACKAGE defined\n" if (! $p{package});
Carp::cluck "Create DB log entry: No NAME defined\n" if (! $p{name});
Carp::cluck "Create DB log entry: No FILENAME defined\n" if (! $p{filename});
if(!$dbh or !$p{package} or !$p{name} or !$p{filename}) {
return;
}
if (!$p{LOGSTART}) {
require Time::Piece;
my $t = Time::Piece->localtime;
# my $t = localtime;
$p{LOGSTART} = $t->strftime("%Y-%m-%d %H:%M:%S");
}
my $plugin = LoxBerry::System::plugindata($p{package});
if ($plugin and $plugin->{PLUGINDB_TITLE}) {
$p{_ISPLUGIN} = 1;
$p{PLUGINTITLE} = $plugin->{PLUGINDB_TITLE};
}
# Start transaction
$dbh->do("BEGIN TRANSACTION;");
# Insert main attributes
my $sth = $dbh->prepare('INSERT INTO logs (PACKAGE, NAME, FILENAME, LOGSTART, LASTMODIFIED) VALUES (?, ?, ?, ?, ?) ;');
# print STDERR "package $p{package}, name $p{name}\n";
$sth->execute($p{package}, $p{name}, $p{filename} , $p{LOGSTART}, $p{LOGSTART}) or
do {
Carp::cluck "Error inserting log to DB: $DBI::errstr\n";
return undef;
};
my $id = $dbh->sqlite_last_insert_rowid();
# Process further attributes
my $sth2;
$sth2 = $dbh->prepare('INSERT OR REPLACE INTO logs_attr (keyref, attrib, value) VALUES (?, ?, ?);');
for my $key (keys %p) {
next if ( grep ( /^$key$/, @db_attribute_exclude_list ) or !$p{$key} );
# print STDERR "INSERT id $id, key $key, value $p{$key}\n";
$sth2->execute($id, $key, $p{$key});
}
$dbh->do("COMMIT;") or
do {
print STDERR "log_db_logstart: commit failed: $DBI::errstr\n";
return undef;
};
return $id;
}
sub log_db_logend
{
my $dbh = shift;
my %p = %{shift()};
# print STDERR "Package: " . $p{'package'} . "\n";
# Check mandatory fields
Carp::cluck "log_db_logend: Create DB log entry: DBH not defined\n" if (!$dbh);
Carp::carp "log_db_logend: No dbkey defined. Possibly LOGSTART event is missing?\n" if (!$p{dbkey});
if (!$dbh or !$p{dbkey}) {
return;
}
require Time::Piece;
my $t = Time::Piece->localtime;
my $logend = $t->strftime("%Y-%m-%d %H:%M:%S");
# Start transaction
$dbh->do("BEGIN TRANSACTION;");
# Insert main attributes
my $sth = $dbh->prepare('UPDATE logs set LOGEND = ?, LASTMODIFIED = ? WHERE LOGKEY = ? ;');
$sth->execute($logend, $logend, $p{dbkey}) or
do {
Carp::cluck "Error updating logend in DB: $DBI::errstr\n";
return undef;
};
# Process further attributes
my $sth2;
$sth2 = $dbh->prepare('INSERT OR REPLACE INTO logs_attr (keyref, attrib, value) VALUES (?, ?, ?);');
for my $key (keys %p) {
next if ( grep ( /^$key$/, @db_attribute_exclude_list ) );
$sth2->execute($p{dbkey}, $key, $p{$key});
}
$dbh->do("COMMIT;") or
do {
print STDERR "log_db_logend: commit failed: $DBI::errstr\n";
return undef;
};
return "Success";
}
sub log_db_recreate_session_by_id
{
my $self = shift;
my $key = $self->{dbkey};
my $dbh = log_db_init_database();
if(!$dbh) {
print STDERR " dbh not defined. Return undef\n<-- log_db_recreate_session_by_id\n";
return undef;
}
if(!$key) {
print STDERR " No logdb key defined. Return undef\n<-- log_db_recreate_session_by_id\n";
return undef;
}
require DBI;
# Get log object
my $qu = "SELECT PACKAGE, NAME, FILENAME, LOGSTART, LOGEND FROM logs WHERE LOGKEY = $key LIMIT 1;";
my $logshr = $dbh->selectall_arrayref($qu, { Slice => {} });
if (!@$logshr) {
print STDERR " LOGKEY does not exist. Return undef\n<-- log_db_recreate_session_by_id\n";
return undef;
}
# It is not possible to recover a finished session
if (@$logshr[0]->{LOGEND}) {
print STDERR " LOGKEY $key found, but log session has a LOGEND (session is finished) - return undef\n";
return undef;
}
# Get log attributes
my $qu2 = "SELECT attrib, value FROM logs_attr WHERE keyref = $key;";
my $logattrshr = $dbh->selectall_arrayref($qu2, { Slice => {} });
## Recreate log object with data
# Data from log table
$self->{package} = @$logshr[0]->{PACKAGE} if (@$logshr[0]->{PACKAGE});
$self->{name} = @$logshr[0]->{NAME} if (@$logshr[0]->{NAME});
$self->{filename} = @$logshr[0]->{FILENAME} if (@$logshr[0]->{FILENAME});
# Data from attribute table - loop through attributes
foreach my $attr ( keys @$logattrshr ) {
print STDERR "Attribute: @$logattrshr[$attr]->{attrib} / Value: @$logattrshr[$attr]->{value} \n" if ($DEBUG);
$self->{@$logattrshr[$attr]->{attrib}} = @$logattrshr[$attr]->{value} if (!$self->{@$logattrshr[$attr]->{attrib}});
}
return $key;
}
sub log_db_get_session_by_filename
{
my $self = shift;
my $filename = $self->{filename};
my $dbh = log_db_init_database();
if(!$dbh) {
print STDERR " dbh not defined. Return undef\n<-- log_db_get_session_by_filename\n";
return undef;
}
if(!$filename) {
print STDERR " No logdb key defined. Return undef\n<-- log_db_get_session_by_filename\n";
return undef;
}
require DBI;
# Get log object
my $qu = "SELECT PACKAGE, NAME, FILENAME, LOGSTART, LOGEND, LOGKEY FROM logs WHERE FILENAME = '$filename' ORDER BY LOGSTART DESC LIMIT 1;";
my $logshr = $dbh->selectall_arrayref($qu, { Slice => {} });
if (!@$logshr) {
print STDERR "log_db_get_session_by_filename: FILENAME has no dbkey. New key is created.\n" if ($DEBUG);
$self->{dbkey} = log_db_logstart($self->{dbh}, $self);
print STDERR "log_db_get_session_by_filename: New dbkey is " . $self->{dbkey} . "\n" if ($DEBUG);
} else {
$self->{dbkey} = @$logshr[0]->{LOGKEY};
print STDERR "log_db_get_session_by_filename: Existing dbkey is used " . $self->{dbkey} . "\n" if ($DEBUG);
}
# Get log attributes
my $qu2 = "SELECT attrib, value FROM logs_attr WHERE keyref = '" . $self->{dbkey} . "';";
my $logattrshr = $dbh->selectall_arrayref($qu2, { Slice => {} });
## Recreate log object with data
# Data from log table
# Data from attribute table - loop through attributes
foreach my $attr ( keys @$logattrshr ) {
print STDERR "Attribute: @$logattrshr[$attr]->{attrib} / Value: @$logattrshr[$attr]->{value} \n" if ($DEBUG);
$self->{@$logattrshr[$attr]->{attrib}} = @$logattrshr[$attr]->{value} if (!$self->{@$logattrshr[$attr]->{attrib}});
}
return $self->{dbkey};
}
sub log_db_bulk_delete_logkey
{
my ($dbh, @keys) = @_;
if(!$dbh && $DEBUG) {
print STDERR " dbh not defined. Return undef\n<-- log_db_bulk_delete_logkey\n";
return undef;
}
if(!@keys && $DEBUG) {
print STDERR " No Keys defined. Return undef\n<-- log_db_bulk_delete_logkey\n";
return undef;
}
require DBI;
return undef if (! $dbh);
print STDERR "Bulk delete BEGIN TRAN\n" if ($DEBUG);
$dbh->do("BEGIN TRANSACTION;");
foreach my $key (@keys) {
$dbh->do("DELETE FROM logs_attr WHERE keyref = $key;");
$dbh->do("DELETE FROM logs WHERE LOGKEY = $key;");
}
$dbh->do("DELETE FROM logs_attr WHERE keyref NOT IN (SELECT logkey FROM logs);");
print STDERR "Bulk delete COMMIT\n" if ($DEBUG);
$dbh->do("COMMIT;");
}
sub log_db_delete_logkey
{
my ($dbh, $key) = @_;
# print STDERR "log_db_deletelogkey -->\n" if ($DEBUG);
if(!$dbh && $DEBUG) {
print STDERR " dbh not defined. Return undef\n<-- log_db_deletelogkey\n";
return undef;
}
if(!$key && $DEBUG) {
print STDERR " No Key defined. Return undef\n<-- log_db_deletelogkey\n";
return undef;
}
# SQLite interface
require DBI;
# my $dbh = log_db_init_database();
return undef if (! $dbh);
$dbh->do("BEGIN TRANSACTION;");
$dbh->do("DELETE FROM logs_attr WHERE keyref = $key;");
$dbh->do("DELETE FROM logs WHERE LOGKEY = $key;");
print STDERR " Commit\n" if ($DEBUG);
$dbh->do("COMMIT;");
print STDERR "<--- log_db_delete_logkey\n" if ($DEBUG);
}
sub log_db_recreate_session
{
my $dbh = shift;
my $self = shift;
my $key = $self->{dbkey};
if(!$dbh) {
print STDERR "log_db_recreate_session: dbh not defined - Abort\n" if($DEBUG);
return;
}
if(!$key) {
print STDERR "log_db_recreate_session: dbkey not defined - Abort\n" if($DEBUG);
return;
}
my $qu = "SELECT PACKAGE, NAME, FILENAME, LOGSTART, LOGEND FROM logs WHERE LOGKEY = $key LIMIT 1;";
my $logshr = $dbh->selectall_arrayref($qu, { Slice => {} });
if (@$logshr) {
print STDERR "log_db_recreate_session: logkey exists, nothing to do\n" if($DEBUG);
return;
}
print STDERR "log_db_recreate_session: Session does not exist in DB - creating a new session\n";
$self->{dbkey} = log_db_logstart($self->{dbh}, $self);
}
################################################################
# get_logs
# Input: (optional) package, name
# Output: Array with hashref to log entries
################################################################
# PUBLIC FUNCTION
sub get_logs
{
my ($package, $name, $nofilter) = @_;
print STDERR "--> get_logs\n" if ($DEBUG);
# SQLite interface
require DBI;
my $dbh = log_db_init_database();
print STDERR "get_logs: Could not init database\n" if (! $dbh);
return undef if (! $dbh);
my $qu;
$qu = "SELECT * FROM logs ";
$qu .= "WHERE " if ($package);
$qu .= "PACKAGE = '$package' AND NAME = '$name' " if ($package && $name);
$qu .= "PACKAGE = '$package' " if ($package && !$name);
$qu .= "ORDER BY PACKAGE, NAME, LASTMODIFIED DESC ";
print STDERR " Query: $qu\n" if ($DEBUG);
my $logshr = $dbh->selectall_arrayref($qu, { Slice => {} });
my @logs;
my %logcount;
# my @keystodelete;
foreach my $key (@$logshr) {
my $filesize;
my $fileexists;
$fileexists = -e $key->{'FILENAME'};
$filesize = -s $key->{'FILENAME'} if ($fileexists);
if (!$nofilter and $key->{'LOGSTART'} and ! -e "$key->{'FILENAME'}") {
print STDERR "$key->{'FILENAME'} does not exist - skipping" if ($DEBUG);
next;
}
my %log;
require Time::Piece;
my $logstartobj = Time::Piece->strptime($key->{'LOGSTART'}, "%Y-%m-%d %H:%M:%S") if ($key->{'LOGSTART'});
my $logendobj = Time::Piece->strptime($key->{'LOGEND'}, "%Y-%m-%d %H:%M:%S") if ($key->{'LOGEND'});
my $lastmodifiedobj = Time::Piece->strptime($key->{'LASTMODIFIED'}, "%Y-%m-%d %H:%M:%S") if ($key->{'LASTMODIFIED'});
# # Delete by age (older than 1 month)
# if (time > ($lastmodifiedobj+2629746) ) {
# push @keystodelete, $key->{'LOGKEY'};
# # log_db_delete_logkey($dbh, $key->{'LOGKEY'});
# next;
# }
# # Count and delete (more than 20 per package)
# $logcount{$key->{'PACKAGE'}}{$key->{'NAME'}}++;
# if ($logcount{$key->{'PACKAGE'}}{$key->{'NAME'}} > 20) {
# push @keystodelete, $key->{'LOGKEY'};
# # log_db_delete_logkey($dbh, $key->{'LOGKEY'});
# next;
# }
$log{'LOGSTARTISO'} = $logstartobj->datetime if($logstartobj);
$log{'LOGSTARTSTR'} = $logstartobj->strftime("%d.%m.%Y %H:%M") if($logstartobj);
$log{'LOGENDISO'} = $logendobj->datetime if ($logendobj);
$log{'LOGENDSTR'} = $logendobj->strftime("%d.%m.%Y %H:%M") if ($logendobj);
$log{'LASTMODIFIEDISO'} = $lastmodifiedobj->datetime if ($lastmodifiedobj);
$log{'LASTMODIFIEDSTR'} = $lastmodifiedobj->strftime("%d.%m.%Y %H:%M") if ($lastmodifiedobj);
$log{'PACKAGE'} = $key->{'PACKAGE'};
$log{'NAME'} = $key->{'NAME'};
$log{'FILENAME'} = $key->{'FILENAME'};
$log{'KEY'} = $key->{'LOGKEY'};
my $qu_attr = "SELECT * FROM logs_attr WHERE keyref = '$key->{'LOGKEY'}';";
my @attribs = $dbh->selectall_array($qu_attr);
if (@attribs) {
foreach my $attrib (@attribs) {
$log{@$attrib[1]} = @$attrib[2];
# print STDERR "Attrib: 0:" . @$attrib[0] . " 1:" . @$attrib[1] . " 2:" . @$attrib[2] . "\n";
}
}
push(@logs, \%log);
}
# log_db_bulk_delete_logkey($dbh, @keystodelete);
return @logs;
}
##################################################################
##################################################################
# NOTIFICATION FUNCTIONS (notify)
my @notifications;
my $content_was_read;
my $notifications_error;
my $notifications_ok;
our $notification_dir = $LoxBerry::System::lbsdatadir . "/notifications";
# PUBLIC FUNCTION
sub notify
{
my ($package, $name, $message, $error) = @_;
print STDERR "notify --->\n" if ($DEBUG);
my $severity;
if ($error) {
$severity = 3;
} else {
$severity = 6;
}
# SQLite interface
require DBI;
my $dbh;
$dbh = notify_init_database();
print STDERR "notify: Could not init database.\n" if (! $dbh);
return undef if (! $dbh);
# Build hash
my %data = (
PACKAGE => $package,
NAME => $name,
MESSAGE => $message,
SEVERITY => $severity,
);
if ($LoxBerry::System::lbpplugindir) {
print STDERR " Detected plugin notification\n" if ($DEBUG);
$data{_ISPLUGIN} = 1;
} else {
print STDERR " Detected system notification\n" if ($DEBUG);
$data{_ISSYSTEM} = 1;
}
notify_insert_notification($dbh, \%data);
my $dbfile = $dbh->sqlite_db_filename();
$dbh->disconnect;
eval {
my $uid = (stat $dbfile)[4];
my $owner = (getpwuid $uid)[0];
if ($owner ne 'loxberry') {
my ($login,$pass,$uid,$gid) = getpwnam('loxberry');
chown $uid, $gid, $dbfile;
}
};
notify_send_mail(\%data);
print STDERR "<--- notify\n" if ($DEBUG);
}
# PUBLIC FUNCTION
sub notify_ext
{
print STDERR "notify_ext --->\n" if ($DEBUG);
# SQLite interface
require DBI;
require HTML::Entities;
my $dbh;
my $data = shift;
$data->{MESSAGE} = HTML::Entities::decode($data->{MESSAGE});
$dbh = notify_init_database();
print STDERR "notify_ext: Could not init database.\n" if (! $dbh);
return undef if (! $dbh);
if (! $data->{_ISPLUGIN} && ! $data->{_ISSYSTEM}) {
my $plugin = LoxBerry::System::plugindata($data->{PACKAGE});
if ($LoxBerry::System::lbpplugindir || $plugin) {
print STDERR " Detected plugin notification\n" if ($DEBUG);
$data->{_ISPLUGIN} = 1;
} else {
print STDERR " Detected system notification\n" if ($DEBUG);
$data->{_ISSYSTEM} = 1;
}
}
#require Encode;
#$data->{MESSAGE} = Encode::encode("utf8", $data->{MESSAGE});
notify_insert_notification($dbh, $data);
my $dbfile = $dbh->sqlite_db_filename();
$dbh->disconnect;
eval {
my $uid = (stat $dbfile)[4];
my $owner = (getpwuid $uid)[0];
if ($owner ne 'loxberry') {
my ($login,$pass,$uid,$gid) = getpwnam('loxberry');
chown $uid, $gid, $dbfile;
}
};
notify_send_mail($data);
print STDERR "<--- notify_ext finished\n" if ($DEBUG);
}
# INTERNAL FUNCTIONS
sub notify_init_database
{
my $dbfile = $LoxBerry::System::lbsdatadir . "/notifications_sqlite.dat";
my $dbh;
my $dores;
$dbh = DBI->connect("dbi:SQLite:dbname=$dbfile","","") or
do {
print STDERR "notify_init_database connect: $DBI::errstr\n";
return undef;
};
$dbh->{sqlite_unicode} = 1;
$dbh->do("CREATE TABLE IF NOT EXISTS notifications (
PACKAGE VARCHAR(255) NOT NULL,
NAME VARCHAR(255) NOT NULL,
MESSAGE TEXT,
SEVERITY INT,
timestamp DATETIME DEFAULT (datetime('now','localtime')) NOT NULL,
notifykey INTEGER PRIMARY KEY
)") or
do {
print STDERR "notify_init_database create table notifications: $DBI::errstr\n";
return undef;
};
$dbh->do("CREATE TABLE IF NOT EXISTS notifications_attr (
keyref INTEGER NOT NULL,
attrib VARCHAR(255) NOT NULL,
value VARCHAR(255),
PRIMARY KEY ( keyref, attrib )
)") or
do {
print STDERR "notify_init_database create table notifications_attr: $DBI::errstr\n";
return undef;
};
return $dbh;
}
# INTERNAL FUNCTION
sub notify_insert_notification
{
my $dbh = shift;
my %p = %{shift()};
# print STDERR "Package: " . $p{'package'} . "\n";
# Check mandatory fields
Carp::croak "Create notification: No PACKAGE defined\n" if (! $p{PACKAGE});
Carp::croak "Create notification: No NAME defined\n" if (! $p{NAME});
Carp::croak "Create notification: No MESSAGE defined\n" if (! $p{MESSAGE});
Carp::croak "Create notification: No SEVERITY defined\n" if (! $p{SEVERITY});
# Strip HTML from $message
$p{MESSAGE} =~ s/<br>/\\n/g;
$p{MESSAGE} =~ s/<p>/\\n/g;
$p{MESSAGE} =~ s/<.+?>//g;
# Start transaction
$dbh->do("BEGIN TRANSACTION;");
# Insert main notification
my $sth = $dbh->prepare('INSERT INTO notifications (PACKAGE, NAME, MESSAGE, SEVERITY) VALUES (?, ?, ?, ?) ;');
$sth->execute($p{PACKAGE}, $p{NAME}, $p{MESSAGE} , $p{SEVERITY}) or
do {
Carp::croak "Error inserting notification: $DBI::errstr\n";
return undef;
};
my $id = $dbh->sqlite_last_insert_rowid();
# Process further attributes
my $sth2;
$sth2 = $dbh->prepare('INSERT INTO notifications_attr (keyref, attrib, value) VALUES (?, ?, ?);');
for my $key (keys %p) {
next if ($key eq 'PACKAGE' or $key eq 'NAME' or $key eq 'MESSAGE' or $key eq 'SEVERITY');
$sth2->execute($id, $key, $p{$key});
}
$dbh->do("COMMIT;") or
do {
print STDERR "notify: commit failed: $DBI::errstr\n";
return undef;
};
return "Success";
$sth2->execute($id, 'logfile', 'This is the log');
$sth2->execute($id, 'level', 5);
}
# INTERNAL FUNCTION
sub notify_send_mail
{
my %p = %{shift()};
my $subject;
my $message;
my %mcfg;
# Don't try to send email that we cannot send emails
return if ($notifymailerror);
# Read mail settings
require LoxBerry::JSON;
my $sysmailobj = LoxBerry::JSON->new();
my $mcfg = $sysmailobj->open(filename => "$LoxBerry::System::lbsconfigdir/mail.json", readonly => 1);
# Don't send email if mail in general, or the specific mail type is disabled
return if ($p{SEVERITY} != 3 && $p{SEVERITY} != 6);
return if (! $mcfg or ! LoxBerry::System::is_enabled($mcfg->{SMTP}->{ACTIVATE_MAIL}));
return if (! LoxBerry::System::is_enabled($mcfg->{NOTIFICATION}->{MAIL_SYSTEM_ERRORS}) && $p{_ISSYSTEM} && $p{SEVERITY} == 3);
return if (! LoxBerry::System::is_enabled($mcfg->{NOTIFICATION}->{MAIL_SYSTEM_INFOS}) && $p{_ISSYSTEM} && $p{SEVERITY} == 6);
return if (! LoxBerry::System::is_enabled($mcfg->{NOTIFICATION}->{MAIL_PLUGIN_ERRORS}) && $p{_ISPLUGIN} && $p{SEVERITY} == 3);
return if (! LoxBerry::System::is_enabled($mcfg->{NOTIFICATION}->{MAIL_PLUGIN_INFOS}) && $p{_ISPLUGIN} && $p{SEVERITY} == 6);
# Prepare some additional fields
my $plugintitle;
if(!$p{_ISSYSTEM}) {
my $plugin = LoxBerry::System::plugindata($p{PACKAGE});
$plugintitle = defined $plugin->{PLUGINDB_TITLE} ? $plugin->{PLUGINDB_TITLE} : $p{PACKAGE};
}
# Add some values to the options
$p{SEVERITY_STR} = "INFO" if ($p{SEVERITY} == 6);
$p{SEVERITY_STR} = "ERROR" if ($p{SEVERITY} == 3);
$p{PLUGINTITLE} = $plugintitle;
if ($p{LOGFILE}) {
$p{LOGFILE_REL} = $p{LOGFILE};
$p{LOGFILE_REL} =~ s/^$LoxBerry::System::lbhomedir\///;
$p{LOGFILE_REL} =~ s/^log\///;
}
## Call the email provider
require JSON;
my $options_json = quotemeta(JSON::to_json(\%p) ) ;
my ($exitcode, $output) = LoxBerry::System::execute("$LoxBerry::System::lbssbindir/notifyproviders/email.pl $options_json");
if ($exitcode != 0) {
my %SL = LoxBerry::System::readlanguage(undef, undef, 1);
$notifymailerror = 1; # Prevents loops
my %notification = (
PACKAGE => "mailserver",
NAME => "mailerror",
MESSAGE => $SL{'MAILSERVER.NOTIFY_MAIL_ERROR'},
SEVERITY => 3, # Error
_ISSYSTEM => 1
);
LoxBerry::Log::notify_ext( \%notification );
print STDERR "Error sending email notification - Output: $output\n";
}
}
################################################################
# get_notifications
# Input: (optional) specific notification event filter
# Output: Hash with notifications
################################################################
# PUBLIC FUNCTION
sub get_notifications
{
# print STDERR "get_notifications called.\n" if ($DEBUG);
my ($package, $name) = @_;
print STDERR "--> get_notifications\n" if ($DEBUG);
# SQLite interface
require DBI;
my $dbh = notify_init_database();
print STDERR "get_notifications: Could not init database\n" if (! $dbh);
return undef if (! $dbh);
my $qu;
$qu = "SELECT * FROM notifications ";
$qu .= "WHERE " if ($package);
$qu .= "PACKAGE = '$package' AND NAME = '$name' " if ($package && $name);
$qu .= "PACKAGE = '$package' " if ($package && !$name);
$qu .= "ORDER BY timestamp DESC ";
print STDERR " Query: $qu\n" if ($DEBUG);
my $notifhr = $dbh->selectall_arrayref($qu, { Slice => {} });
my @notifications;
foreach my $key (@$notifhr ) {
require HTML::Entities;
require Time::Piece;
my %notification;
my $dateobj = Time::Piece->strptime($key->{'timestamp'}, "%Y-%m-%d %H:%M:%S");
my $contenthtml = $key->{'MESSAGE'};
$contenthtml = HTML::Entities::encode_entities($contenthtml, '<>&"');
$contenthtml =~ s/\n/<br>\n/g;
$notification{'DATEISO'} = $dateobj->datetime;
$notification{'DATESTR'} = $dateobj->strftime("%d.%m.%Y %H:%M");
$notification{'PACKAGE'} = $key->{'PACKAGE'};
$notification{'NAME'} = $key->{'NAME'};
$notification{'SEVERITY'} = $key->{'SEVERITY'};
$notification{'KEY'} = $key->{'notifykey'};
$notification{'CONTENTRAW'} = $key->{'MESSAGE'};
$notification{'CONTENTHTML'} = $contenthtml;
my $qu_attr = "SELECT * FROM notifications_attr WHERE keyref = '$key->{'notifykey'}';";
my @attribs = $dbh->selectall_array($qu_attr);
if (@attribs) {
foreach my $attrib (@attribs) {
$notification{@$attrib[1]} = @$attrib[2];
# print STDERR "Attrib: 0:" . @$attrib[0] . " 1:" . @$attrib[1] . " 2:" . @$attrib[2] . "\n";
}
}
push(@notifications, \%notification);
}
return @notifications;
}
# sub get_notifications_with_content
# {
# my ($package, $name, $latest) = @_;
# my @filtered = LoxBerry::Log::get_notifications($package, $name, $latest, undef, 1);
# return @filtered;
# }
# Retuns an array with the number of notifications
# PUBLIC FUNCTION
sub get_notification_count
{
my ($package, $name, $latest) = @_;
#my ($notification_error, $notification_ok, $notification_sum) = LoxBerry::Log::get_notifications($package, $name, $latest, 1);
print STDERR "get_notification_count -->\n" if ($DEBUG);
# SQLite interface
require DBI;
my $dbh = notify_init_database();
return undef if (! $dbh);
my $qu;
my @resinf;
my @reserr;
$qu = "SELECT count(*) FROM notifications ";
$qu .= "WHERE " if ($package);
$qu .= "PACKAGE = '$package' AND NAME = '$name' AND " if ($package && $name);
$qu .= "PACKAGE = '$package' AND " if ($package && !$name);
my $querr = $qu . "SEVERITY = 3;";
my $quinf = $qu . "SEVERITY = 6;";
# print STDERR "Error Query: $querr\n" if ($DEBUG);
# print STDERR "Info Query: $quinf\n" if ($DEBUG);
my ($notification_error) = $dbh->selectrow_array($querr);
my ($notification_ok) = $dbh->selectrow_array($quinf);
print STDERR " Error Count: $notification_error\n" if ($DEBUG);
print STDERR " Info Count: $notification_ok\n" if ($DEBUG);
print STDERR "<-- get_notification_count\n" if ($DEBUG);
return $notification_error, $notification_ok, ($notification_error+$notification_ok);
}
# PUBLIC FUNCTION
sub delete_notifications
{
my ($package, $name, $ignorelatest) = @_;
print STDERR "delete_notifications -->\n" if ($DEBUG);
print STDERR " No PACKAGE defined. Return undef\n<-- delete_notifications\n" if (!$package && $DEBUG);
return undef if (!$package);
# SQLite interface
require DBI;
my $dbh = notify_init_database();
return undef if (! $dbh);
my $qu;
my @resinf;
my @reserr;
$dbh->do("BEGIN TRANSACTION;");
$qu = "SELECT notifykey FROM notifications ";
$qu .= "WHERE " if ($package || $name || $ignorelatest);
$qu .= "PACKAGE = '$package' AND NAME = '$name' " if ($package && $name);
$qu .= "PACKAGE = '$package' " if ($package && !$name);
if ($ignorelatest) {
my $qu_latest = $qu . "ORDER BY timestamp DESC LIMIT 1;";
my ($latest) = $dbh->selectrow_array($qu_latest);
$qu .= "AND " if ($package && $latest);
$qu .= "notifykey <> $latest " if ($package && $latest);
print STDERR " Key to keep: $latest\n" if ($DEBUG);
}
$qu .=";";
# print STDERR "Select Keys to delete query: $qu\n";
my @keylist = $dbh->selectall_array($qu);
my $number_to_delete = scalar @keylist;
print STDERR " Number of elements to delete: $number_to_delete\n" if ($DEBUG);
if ($number_to_delete < 1) {
print STDERR " Nothing to do. Rollback and returning.\n<--- delete_notifications\n" if ($DEBUG);
$dbh->do("ROLLBACK;");
return;
}
my $deletelist;
foreach my $key (@keylist) {
$deletelist .= "@$key[0], ";
}
$deletelist = LoxBerry::System::trim($deletelist);
$deletelist =~ s/,$//;
print STDERR " Deletelist: $deletelist\n" if ($DEBUG);
$dbh->do("DELETE FROM notifications_attr WHERE keyref IN ($deletelist);");
$dbh->do("DELETE FROM notifications WHERE notifykey IN ($deletelist);");
print STDERR " Commit\n" if ($DEBUG);
$dbh->do("COMMIT;");
print STDERR "<--- delete_notifications\n" if ($DEBUG);
}
sub delete_notification_key
{
my ($key) = @_;
print STDERR "delete_notification_key -->\n" if ($DEBUG);
print STDERR " No Key defined. Return undef\n<-- delete_notification_key\n" if (!$key && $DEBUG);
return undef if (!$key);
# SQLite interface
require DBI;
my $dbh = notify_init_database();
return undef if (! $dbh);
$dbh->do("BEGIN TRANSACTION;");
$dbh->do("DELETE FROM notifications_attr WHERE keyref = $key;");
$dbh->do("DELETE FROM notifications WHERE notifykey = $key;");
print STDERR " Commit\n" if ($DEBUG);
$dbh->do("COMMIT;");
print STDERR "<--- delete_notification_key\n" if ($DEBUG);
}
# sub notification_content
# {
# my ($key) = @_;
# my $notifyfile = "$notification_dir/$key";
# open (my $fh, "<" , $notifyfile) or return undef;
# my $content = <$fh>;
# close ($fh);
# my $contenthtml = $content;
# $contenthtml =~ s/\n/<br>\n/g;
# $contenthtml = HTML::Entities::encode_entities($contenthtml, '<>&"');
# print STDERR "Contentraw: $content ContentHTML: $contenthtml\n" if ($DEBUG);
# return $content, $contenthtml;
# }
sub get_notifications_html
{
my %p = @_;
my ($package, $name, $type, $buttons) = @_;
print STDERR "get_notifications_html --->\n" if ($DEBUG);
$p{package} = $package if ($package);
$p{name} = $name if ($name);
$p{buttons} = $buttons if ($buttons);
$p{error} = 1 if (!$type || $type == 2 || $type eq 'all' || $type eq 'err' || $type eq 'error' || $type eq 'errors');
$p{info} = 1 if (!$type || $type == 1 || $type eq 'all' || $type eq 'inf' || $type eq 'info' || $type eq 'infos');
my @notifs = LoxBerry::Log::get_notifications($package, $name);
if ($DEBUG) {
print STDERR " Parameters used:\n";
print STDERR " package: $p{package}\n";
print STDERR " name: $p{name}\n";
print STDERR " buttons: $p{buttons}\n";
print STDERR " error: $p{error}\n";
print STDERR " info: $p{info}\n";
}
if (! @notifs) {
print STDERR "<--- No notifications found. Returning nothing.\n" if ($DEBUG);
return;
}
my @notify_html;
my $all_notifys;
my $randval = int(rand(30000));
foreach my $not (@notifs) {
# Don't show info when errors are requested
print STDERR "Notification: $not->{SEVERITY} $not->{DATESTR} $not->{PACKAGE} $not->{NAME} $not->{CONTENTRAW}\n" if ($DEBUG);
next if ($not->{SEVERITY} != 3 && $not->{SEVERITY} != 6);
if ( $not->{SEVERITY} == 3 && ! $p{error} ) {
print STDERR "Skipping notification - is error but info requested\n" if ($DEBUG);
next;
}
# Don't show errors when infos are requested
if ( $not->{SEVERITY} == 6 && ! $p{error} ) {
print STDERR "Skipping notification - is info but error requested\n" if ($DEBUG);
next;
}
my $logfilepath;
if ( $not->{LOGFILE} ) {
$logfilepath = $not->{LOGFILE};
$logfilepath =~ s/^$LoxBerry::System::lbhomedir\///;
$logfilepath =~ s/^log\///;
}
my $link;
my $linktarget;
if ( $not->{LINK} ) {
$link = $not->{LINK};
$linktarget = ( LoxBerry::System::begins_with($link, "http://") or LoxBerry::System::begins_with($link, "https://") ) ? "_blank" : "_self";
}
my $notif_line;
$notif_line = qq(<div style='display:table-row;' class='notifyrow$randval' id='notifyrow$not->{KEY}'>\n);
$notif_line .= qq( <div style="display:table-cell; vertical-align: middle; width:30px; padding:10px;">\n);
if ($not->{SEVERITY} == 6) {
$notif_line .= qq( <img src="/system/images/notification_info_small.svg">\n);
} elsif ($not->{SEVERITY} == 3) {
$notif_line .= qq( <img src="/system/images/notification_error_small.svg">\n);
}
$notif_line .= qq( </div>\n);
$notif_line .= qq( <div style='vertical-align: middle; width:75%; display: table-cell; padding: 7px;'><b>$not->{DATESTR}:</b> $not->{CONTENTHTML}</div>\n);
$notif_line .= qq( <div style='vertical-align: middle; width:25%; display: table-cell; align:right; text-align: right;'>\n);
$notif_line .= qq( <a class="btnlogs" data-role="button" href="/admin/system/tools/logfile.cgi?logfile=$logfilepath&header=html&format=template" target="_blank" data-inline="true" data-mini="true" data-icon="arrow-d">Logfile</a>\n) if ($logfilepath);
$notif_line .= qq( <a class="btnlink" data-role="button" href="$link" target="$linktarget" data-inline="true" data-mini="true" data-icon="action">Details</a>\n) if ($link);
$notif_line .= qq( <a href='#' class='notifdelete' id='notifdelete$not->{KEY}' data-delid='$not->{KEY}' data-role='button' data-icon='delete' data-iconpos='notext' data-inline='true' data-mini='true'>(X)</a>\n);
$notif_line .= qq( </div>\n);
# print STDERR $notif_line if ($DEBUG);
$notif_line .= qq(</div>\n);
$all_notifys .= $notif_line;
push (@notify_html, $notif_line);
}
return if (! $all_notifys);
require HTML::Template;
our $maintemplate = HTML::Template->new(
filename => "$LoxBerry::System::lbstemplatedir/get_notification_html.html",
global_vars => 1,
loop_context_vars => 1,
die_on_bad_params=> 0,
%LoxBerry::System::htmltemplate_options,
);
$maintemplate->param( 'NOTIFICATIONS' => $all_notifys);
$maintemplate->param( 'RAND' => $randval );
my %SL = LoxBerry::System::readlanguage($maintemplate, undef, 1);
#print STDERR
return $maintemplate->output();
}
#####################################################
# Parse yyyymmdd_hhmmss date to date object
#####################################################
sub parsedatestring
{
my ($datestring) = @_;
my $dt;
eval {
require Time::Piece;
$dt = Time::Piece->strptime($datestring, "%Y%m%d_%H%M%S");
};
# LOGDEB "parsedatestring: Calculated date/time: " . $dt->strftime("%d.%m.%Y %H:%M");
return $dt;
}
# INTERNAL FUNCTION
sub get_severity
{
my ($sevstr) = @_;
$sevstr = lc $sevstr;
# Ordered by most possible occurrency
# 3
my @error = ("3", "err", "error", "logerr");
return 3 if ( grep( /^$sevstr$/, @error ) );
# 6
my @info = ("6", "inf", "info", "loginf");
return 6 if ( grep( /^$sevstr$/, @info ) );
# 4
my @warning = ("4", "warn", "warning", "logwarn");
return 4 if ( grep( /^$sevstr$/, @warning ) );
# 5
my @ok = ("5", "ok", "logok");
return 5 if ( grep( /^$sevstr$/, @ok ) );
# 7
my @debug = ("7", "debug", "deb", "logdeb", "logdebug");
return 7 if ( grep( /^$sevstr$/, @debug ) );
# 2
my @critical = ("2", "critical", "crit", "critic", "logcrit");
return 2 if ( grep( /^$sevstr$/, @critical ) );
# 1
my @alert = ("1", "alert", "logalert");
return 1 if ( grep( /^$sevstr$/, @alert ) );
# 0
my @emerge = ("0", "emerg", "emerge", "emergency", "logemerge");
return 0 if ( grep( /^$sevstr$/, @emerge ) );
return undef;
}
##################################################################
##################################################################
## PACKAGE MAIN
package main;
####################################################
# Exported helpers
####################################################
sub LOGDEB
{
create_temp_logobject() if (! $LoxBerry::Log::mainobj);
$LoxBerry::Log::mainobj->DEB(@_); # or Carp::carp("No default object set for exported logging functions.");
}
sub LOGINF
{
create_temp_logobject() if (! $LoxBerry::Log::mainobj);
$LoxBerry::Log::mainobj->INF(@_); # or Carp::carp("No default object set for exported logging functions.");
}
sub LOGOK
{
create_temp_logobject() if (! $LoxBerry::Log::mainobj);
$LoxBerry::Log::mainobj->OK(@_); # or Carp::carp("No default object set for exported logging functions.");
}
sub LOGWARN
{
create_temp_logobject() if (! $LoxBerry::Log::mainobj);
$LoxBerry::Log::mainobj->WARN(@_); # or Carp::carp("No default object set for exported logging functions.");
}
sub LOGERR
{
create_temp_logobject() if (! $LoxBerry::Log::mainobj);
$LoxBerry::Log::mainobj->ERR(@_); # or Carp::carp("No default object set for exported logging functions.");
}
sub LOGCRIT
{
create_temp_logobject() if (! $LoxBerry::Log::mainobj);
$LoxBerry::Log::mainobj->CRIT(@_); # or Carp::carp("No default object set for exported logging functions.");
}
sub LOGALERT
{
create_temp_logobject() if (! $LoxBerry::Log::mainobj);
$LoxBerry::Log::mainobj->ALERT(@_); # or Carp::carp("No default object set for exported logging functions.");
}
sub LOGEMERGE
{
create_temp_logobject() if (! $LoxBerry::Log::mainobj);
$LoxBerry::Log::mainobj->EMERGE(@_); # or Carp::carp("No default object set for exported logging functions.");
}
sub LOGSTART
{
create_temp_logobject() if (! $LoxBerry::Log::mainobj);
$LoxBerry::Log::mainobj->LOGSTART(@_); # or Carp::carp("No default object set for exported logging functions.");
}
sub LOGEND
{
create_temp_logobject() if (! $LoxBerry::Log::mainobj);
$LoxBerry::Log::mainobj->LOGEND(@_); # or Carp::carp("No default object set for exported logging functions.");
}
sub LOGTITLE
{
return if (! $LoxBerry::Log::mainobj);
$LoxBerry::Log::mainobj->logtitle(@_);
}
sub create_temp_logobject
{
my $package;
if (! $LoxBerry::System::lbpplugindir) {
# No package found
$package = $0;
}
else {
$package = $LoxBerry::System::lbpplugindir;
}
my $pluginloglevel = LoxBerry::System::pluginloglevel();
if (! $pluginloglevel or $pluginloglevel < 0) {
$pluginloglevel = 7;
}
$LoxBerry::Log::mainobj = LoxBerry::Log->new (
package => $package,
name => 'STDERR',
stderr => 1,
nofile => 1,
addtime => 1,
loglevel => $pluginloglevel
);
}
#####################################################
# Finally 1; ########################################
#####################################################
1;
| 27.652935 | 257 | 0.598004 |
edda726d8a198e36497fa458f18f40bff70bd787 | 662 | pm | Perl | pdu-perl-api/Raritan/RPC/lhxmodel/Lhx_1_1_1/Settings.pm | gregoa/raritan-pdu-json-rpc-sdk | 76df982462742b97b52872aa34630140f5df7e58 | [
"BSD-3-Clause"
] | 1 | 2021-04-29T23:04:17.000Z | 2021-04-29T23:04:17.000Z | pdu-perl-api/Raritan/RPC/lhxmodel/Lhx_1_1_1/Settings.pm | gregoa/raritan-pdu-json-rpc-sdk | 76df982462742b97b52872aa34630140f5df7e58 | [
"BSD-3-Clause"
] | null | null | null | pdu-perl-api/Raritan/RPC/lhxmodel/Lhx_1_1_1/Settings.pm | gregoa/raritan-pdu-json-rpc-sdk | 76df982462742b97b52872aa34630140f5df7e58 | [
"BSD-3-Clause"
] | 2 | 2020-06-20T16:21:23.000Z | 2021-09-28T19:04:44.000Z | # SPDX-License-Identifier: BSD-3-Clause
#
# Copyright 2020 Raritan Inc. All rights reserved.
#
# This file was generated by IdlC from Lhx.idl.
use strict;
package Raritan::RPC::lhxmodel::Lhx_1_1_1::Settings;
sub encode {
my ($in) = @_;
my $encoded = {};
$encoded->{'setpointWaterValve'} = 1 * $in->{'setpointWaterValve'};
$encoded->{'setpointVentilators'} = 1 * $in->{'setpointVentilators'};
return $encoded;
}
sub decode {
my ($agent, $in) = @_;
my $decoded = {};
$decoded->{'setpointWaterValve'} = $in->{'setpointWaterValve'};
$decoded->{'setpointVentilators'} = $in->{'setpointVentilators'};
return $decoded;
}
1;
| 23.642857 | 73 | 0.637462 |
73fabf5d8a124f869d1b800c2911739e22d8a80f | 1,924 | pm | Perl | lib/Perl/Critic/Policy/Bangs/ProhibitNoPlan.pm | jonasbn/perl-critic-bangs | 2d5fbc8b40aacdc5297af6a15bec80a0b3dba2db | [
"Artistic-2.0"
] | 4 | 2016-05-09T05:33:58.000Z | 2021-10-24T02:04:51.000Z | lib/Perl/Critic/Policy/Bangs/ProhibitNoPlan.pm | jonasbn/perl-critic-bangs | 2d5fbc8b40aacdc5297af6a15bec80a0b3dba2db | [
"Artistic-2.0"
] | 13 | 2016-01-27T19:47:38.000Z | 2021-11-09T15:03:09.000Z | lib/Perl/Critic/Policy/Bangs/ProhibitNoPlan.pm | jonasbn/perl-critic-bangs | 2d5fbc8b40aacdc5297af6a15bec80a0b3dba2db | [
"Artistic-2.0"
] | 3 | 2015-03-12T21:20:34.000Z | 2021-11-09T13:06:23.000Z | package Perl::Critic::Policy::Bangs::ProhibitNoPlan;
use strict;
use warnings;
use Perl::Critic::Utils;
use base 'Perl::Critic::Policy';
our $VERSION = '1.12';
sub supported_parameters { return () }
sub default_severity { return $SEVERITY_LOW }
sub default_themes { return qw( bangs tests ) }
sub applies_to { return 'PPI::Token::QuoteLike::Words' }
#---------------------------------------------------------------------------
sub violates {
my ( $self, $elem, $doc ) = @_;
if ( $elem =~ qr/\bno_plan\b/ ) {
# Make sure that the previous sibling was Test::More, or return
my $sib = $elem->sprevious_sibling() || return;
$sib->isa('PPI::Token::Word') && $sib eq 'Test::More' || return;
my $desc = q(Test::More with "no_plan" found);
my $expl = q(Test::More should be given a plan indicating the number of tests run);
return $self->violation( $desc, $expl, $elem );
}
return;
}
1;
__END__
=head1 NAME
Perl::Critic::Policy::Bangs::ProhibitNoPlan - Know what you're going to test.
=head1 AFFILIATION
This Policy is part of the L<Perl::Critic::Bangs> distribution.
=head1 DESCRIPTION
Test::More should be given a plan indicting the number of tests to be
run. This policy searches for instances of Test::More called with
"no_plan".
=head1 CONFIGURATION
This Policy is not configurable except for the standard options.
=head1 AUTHOR
Andrew Moore <[email protected]>
=head1 ACKNOWLEDGMENTS
Adapted from policies by Jeffrey Ryan Thalhammer <[email protected]>,
Based on App::Fluff by Andy Lester, "<andy at petdance.com>"
=head1 COPYRIGHT
Copyright (c) 2006-2013 Andy Lester <[email protected]> and Andrew
Moore <[email protected]>
This library is free software; you can redistribute it and/or modify it
under the terms of the Artistic License 2.0.
=cut
| 26.356164 | 91 | 0.64605 |
edc1a352c37c80c9750f71a4c92044246562baeb | 2,784 | pm | Perl | lib/Mojo/Server/PSGI.pm | chylli-binary/mojo | fb221bb5c8c94c21eb7202be09833f5592e95d0d | [
"Artistic-2.0"
] | 273 | 2016-06-01T22:04:50.000Z | 2022-03-06T01:52:02.000Z | lib/Mojo/Server/PSGI.pm | chylli-binary/mojo | fb221bb5c8c94c21eb7202be09833f5592e95d0d | [
"Artistic-2.0"
] | 1,317 | 2016-05-31T06:49:50.000Z | 2022-03-25T17:13:51.000Z | lib/Mojo/Server/PSGI.pm | chylli-binary/mojo | fb221bb5c8c94c21eb7202be09833f5592e95d0d | [
"Artistic-2.0"
] | 62 | 2016-06-03T20:17:17.000Z | 2022-03-02T19:02:04.000Z | package Mojo::Server::PSGI;
use Mojo::Base 'Mojo::Server';
sub run {
my ($self, $env) = @_;
my $tx = $self->build_tx;
my $req = $tx->req->parse($env);
$tx->local_port($env->{SERVER_PORT})->remote_address($env->{REMOTE_ADDR});
# Request body (may block if we try to read too much)
my $len = $env->{CONTENT_LENGTH};
until ($req->is_finished) {
my $chunk = ($len && $len < 131072) ? $len : 131072;
last unless my $read = $env->{'psgi.input'}->read(my $buffer, $chunk, 0);
$req->parse($buffer);
last if ($len -= $read) <= 0;
}
$self->emit(request => $tx);
# Response headers
my $res = $tx->res->fix_headers;
my $hash = $res->headers->to_hash(1);
my @headers;
for my $name (keys %$hash) { push @headers, $name, $_ for @{$hash->{$name}} }
# PSGI response
my $io = Mojo::Server::PSGI::_IO->new(tx => $tx, empty => $tx->is_empty);
return [$res->code // 404, \@headers, $io];
}
sub to_psgi_app {
my $self = shift;
# Preload application and wrap it
$self->app->server($self);
return sub { $self->run(@_) }
}
package Mojo::Server::PSGI::_IO;
use Mojo::Base -base;
# Finish transaction
sub close { shift->{tx}->closed }
sub getline {
my $self = shift;
# Empty
return undef if $self->{empty};
# No content yet, try again later
my $chunk = $self->{tx}->res->get_body_chunk($self->{offset} //= 0);
return '' unless defined $chunk;
# End of content
return undef unless length $chunk;
$self->{offset} += length $chunk;
return $chunk;
}
1;
=encoding utf8
=head1 NAME
Mojo::Server::PSGI - PSGI server
=head1 SYNOPSIS
use Mojo::Server::PSGI;
my $psgi = Mojo::Server::PSGI->new;
$psgi->unsubscribe('request')->on(request => sub {
my ($psgi, $tx) = @_;
# Request
my $method = $tx->req->method;
my $path = $tx->req->url->path;
# Response
$tx->res->code(200);
$tx->res->headers->content_type('text/plain');
$tx->res->body("$method request for $path!");
# Resume transaction
$tx->resume;
});
my $app = $psgi->to_psgi_app;
=head1 DESCRIPTION
L<Mojo::Server::PSGI> allows L<Mojolicious> applications to run on all L<PSGI>
compatible servers.
See L<Mojolicious::Guides::Cookbook/"DEPLOYMENT"> for more.
=head1 EVENTS
L<Mojo::Server::PSGI> inherits all events from L<Mojo::Server>.
=head1 ATTRIBUTES
L<Mojo::Server::PSGI> inherits all attributes from L<Mojo::Server>.
=head1 METHODS
L<Mojo::Server::PSGI> inherits all methods from L<Mojo::Server> and implements
the following new ones.
=head2 run
my $res = $psgi->run($env);
Run L<PSGI>.
=head2 to_psgi_app
my $app = $psgi->to_psgi_app;
Turn L<Mojolicious> application into L<PSGI> application.
=head1 SEE ALSO
L<Mojolicious>, L<Mojolicious::Guides>, L<https://mojolicious.org>.
=cut
| 21.251908 | 79 | 0.631466 |
edb507b614cb533da029711544de7720fef81516 | 1,048 | pm | Perl | lib/Dash/Html/Components/Figcaption.pm | weatherwax/perl-Dash | 60783830ae3990c0b839c71d96b72144325e30f8 | [
"MIT"
] | 4 | 2019-12-31T05:08:42.000Z | 2020-07-19T04:41:06.000Z | lib/Dash/Html/Components/Figcaption.pm | weatherwax/perl-Dash | 60783830ae3990c0b839c71d96b72144325e30f8 | [
"MIT"
] | 1 | 2021-03-07T09:17:10.000Z | 2021-03-07T09:51:33.000Z | lib/Dash/Html/Components/Figcaption.pm | weatherwax/perl-Dash | 60783830ae3990c0b839c71d96b72144325e30f8 | [
"MIT"
] | 2 | 2020-04-22T08:17:55.000Z | 2021-01-02T15:46:50.000Z | # AUTO GENERATED FILE - DO NOT EDIT
package Dash::Html::Components::Figcaption;
use Moo;
use strictures 2;
use Dash::Html::ComponentsAssets;
use namespace::clean;
extends 'Dash::BaseComponent';
has 'id' => (
is => 'rw'
);
has 'children' => (
is => 'rw'
);
has 'n_clicks' => (
is => 'rw'
);
has 'n_clicks_timestamp' => (
is => 'rw'
);
has 'key' => (
is => 'rw'
);
has 'role' => (
is => 'rw'
);
has 'accessKey' => (
is => 'rw'
);
has 'className' => (
is => 'rw'
);
has 'contentEditable' => (
is => 'rw'
);
has 'contextMenu' => (
is => 'rw'
);
has 'dir' => (
is => 'rw'
);
has 'draggable' => (
is => 'rw'
);
has 'hidden' => (
is => 'rw'
);
has 'lang' => (
is => 'rw'
);
has 'spellCheck' => (
is => 'rw'
);
has 'style' => (
is => 'rw'
);
has 'tabIndex' => (
is => 'rw'
);
has 'title' => (
is => 'rw'
);
has 'loading_state' => (
is => 'rw'
);
my $dash_namespace = 'dash_html_components';
sub DashNamespace {
return $dash_namespace;
}
sub _js_dist {
return Dash::Html::ComponentsAssets::_js_dist;
}
1;
| 13.265823 | 50 | 0.528626 |
edd650db29a7c8cdd14e386399c3a99ca8a0f407 | 61 | t | Perl | _templates/api/help/index.ejs.t | kperson/swift-lambda-starter-project | 34137e7628d747a9e90111d74afa9076076f8c31 | [
"MIT"
] | null | null | null | _templates/api/help/index.ejs.t | kperson/swift-lambda-starter-project | 34137e7628d747a9e90111d74afa9076076f8c31 | [
"MIT"
] | null | null | null | _templates/api/help/index.ejs.t | kperson/swift-lambda-starter-project | 34137e7628d747a9e90111d74afa9076076f8c31 | [
"MIT"
] | null | null | null | ---
message: |
- hygen {bold api} new --api [API_NAME]
---
| 12.2 | 41 | 0.540984 |
edc3e61a2e79a10d986a86d5c698dc640c526c79 | 344 | pl | Perl | perl/quebec/quebec.pl | trammell/test | ccac5e1dac947032e64d813e53cb961417a58d05 | [
"Artistic-2.0"
] | null | null | null | perl/quebec/quebec.pl | trammell/test | ccac5e1dac947032e64d813e53cb961417a58d05 | [
"Artistic-2.0"
] | null | null | null | perl/quebec/quebec.pl | trammell/test | ccac5e1dac947032e64d813e53cb961417a58d05 | [
"Artistic-2.0"
] | null | null | null | #!/usr/local/bin/perl -l
use strict;
use warnings FATAL => 'all';
binmode STDOUT, ':utf8';
my $quebec = "Qu\x{e9}bec";
print $quebec;
for my $ch (split //, $quebec) {
printf qq{"%s" %3d 0x%02x\n}, $ch, (ord($ch)) x 2;
}
(my $foo = $quebec) =~ s{Qu.*bec}{Quebec};
print $foo;
(my $bar = $quebec) =~ s/Qu\x{e9}bec/Quebec/;
print $bar;
| 16.380952 | 54 | 0.572674 |
73ffaf4f0b6738dc92b20840ceb14b1e7f08579d | 3,306 | pm | Perl | auto-lib/Paws/EC2/CreateImage.pm | cah-rfelsburg/paws | de9ffb8d49627635a2da588066df26f852af37e4 | [
"Apache-2.0"
] | 2 | 2016-09-22T09:18:33.000Z | 2017-06-20T01:36:58.000Z | auto-lib/Paws/EC2/CreateImage.pm | cah-rfelsburg/paws | de9ffb8d49627635a2da588066df26f852af37e4 | [
"Apache-2.0"
] | null | null | null | auto-lib/Paws/EC2/CreateImage.pm | cah-rfelsburg/paws | de9ffb8d49627635a2da588066df26f852af37e4 | [
"Apache-2.0"
] | null | null | null |
package Paws::EC2::CreateImage;
use Moose;
has BlockDeviceMappings => (is => 'ro', isa => 'ArrayRef[Paws::EC2::BlockDeviceMapping]', traits => ['NameInRequest'], request_name => 'blockDeviceMapping' );
has Description => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'description' );
has DryRun => (is => 'ro', isa => 'Bool', traits => ['NameInRequest'], request_name => 'dryRun' );
has InstanceId => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'instanceId' , required => 1);
has Name => (is => 'ro', isa => 'Str', traits => ['NameInRequest'], request_name => 'name' , required => 1);
has NoReboot => (is => 'ro', isa => 'Bool', traits => ['NameInRequest'], request_name => 'noReboot' );
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'CreateImage');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::EC2::CreateImageResult');
class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::EC2::CreateImage - Arguments for method CreateImage on Paws::EC2
=head1 DESCRIPTION
This class represents the parameters used for calling the method CreateImage on the
Amazon Elastic Compute Cloud service. Use the attributes of this class
as arguments to method CreateImage.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to CreateImage.
As an example:
$service_obj->CreateImage(Att1 => $value1, Att2 => $value2, ...);
Values for attributes that are native types (Int, String, Float, etc) can passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
=head1 ATTRIBUTES
=head2 BlockDeviceMappings => ArrayRef[L<Paws::EC2::BlockDeviceMapping>]
Information about one or more block device mappings.
=head2 Description => Str
A description for the new image.
=head2 DryRun => Bool
Checks whether you have the required permissions for the action,
without actually making the request, and provides an error response. If
you have the required permissions, the error response is
C<DryRunOperation>. Otherwise, it is C<UnauthorizedOperation>.
=head2 B<REQUIRED> InstanceId => Str
The ID of the instance.
=head2 B<REQUIRED> Name => Str
A name for the new image.
Constraints: 3-128 alphanumeric characters, parentheses (()), square
brackets ([]), spaces ( ), periods (.), slashes (/), dashes (-), single
quotes ('), at-signs (@), or underscores(_)
=head2 NoReboot => Bool
By default, this parameter is set to C<false>, which means Amazon EC2
attempts to shut down the instance cleanly before image creation and
then reboots the instance. When the parameter is set to C<true>, Amazon
EC2 doesn't shut down the instance before creating the image. When this
option is used, file system integrity on the created image can't be
guaranteed.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method CreateImage in L<Paws::EC2>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| 32.411765 | 249 | 0.712039 |
ed7f79b7dfc7357b38e3db6345f0574ed03f96b0 | 951 | t | Perl | DIR819_v1.06/src/kernel/linux-2.6.36.x/drivers/net/eip93_drivers/quickSec/src/ipsec/quicksec/kernel/accelerator_none_project.t | Sirherobrine23/Dir819gpl_code | 8af92d65416198755974e3247b7bbe7f1151d525 | [
"BSD-2-Clause"
] | 1 | 2022-03-19T06:38:01.000Z | 2022-03-19T06:38:01.000Z | DIR819_v1.06/src/kernel/linux-2.6.36.x/drivers/net/eip93_drivers/quickSec/src/ipsec/quicksec/kernel/accelerator_none_project.t | Sirherobrine23/Dir819gpl_code | 8af92d65416198755974e3247b7bbe7f1151d525 | [
"BSD-2-Clause"
] | null | null | null | DIR819_v1.06/src/kernel/linux-2.6.36.x/drivers/net/eip93_drivers/quickSec/src/ipsec/quicksec/kernel/accelerator_none_project.t | Sirherobrine23/Dir819gpl_code | 8af92d65416198755974e3247b7bbe7f1151d525 | [
"BSD-2-Clause"
] | 1 | 2022-03-19T06:38:03.000Z | 2022-03-19T06:38:03.000Z | .set project_name accelerator_none
.set project_type drvlib
.set project_platforms
.set project_guid E2B0E928-1111-3EFE-9A00-F07EE44B4C6A
.set project_dir ipsec\\quicksec\\kernel
.set project_dir_inverse ..\\..\\..
.set project_incdirs \
ipsec\\hwaccel \
ipsec\\util \
ipsec \
ipsec\\include \
ipsec\\quicksec \
interceptor\\include \
ipsec\\quicksec\\engine \
include \
lib\\sshcrypto \
lib\\sshcrypto\\sshcipher \
lib\\sshcrypto\\sshhash \
lib\\sshcrypto\\sshmac \
lib\\zlib \
interceptor\\windows \
ipsec\\quicksec\\fastpath\\software \
ipsec\\quicksec\\fastpath \
interceptor\\libkernelutil \
ipsec\\quicksec\\kernel \
.
.set project_defs \
SSH_BUILD_IPSEC \
HAVE_CONFIG_H
.set project_cflags
.set project_rcflags
.set project_libdirs
.set project_ldflags
.set project_libs
.set project_dependencies
.set outdir .
.set srcs \
hwaccel_none.c
.set dir_hwaccel_none.c ipsec\\hwaccel\\none
.set custom_tags
.set rsrcs
.set hdrs
| 22.116279 | 54 | 0.758149 |
73d272e588f74fb6660a8c5a5e2bbdd9ebd4ca1e | 2,833 | pl | Perl | report/mico-statistics.pl | TonyChengTW/POP3Tools | 9b0465b98e6ff849a6b5c2f8263cf450caf2648e | [
"Apache-2.0"
] | null | null | null | report/mico-statistics.pl | TonyChengTW/POP3Tools | 9b0465b98e6ff849a6b5c2f8263cf450caf2648e | [
"Apache-2.0"
] | null | null | null | report/mico-statistics.pl | TonyChengTW/POP3Tools | 9b0465b98e6ff849a6b5c2f8263cf450caf2648e | [
"Apache-2.0"
] | null | null | null | #!/usr/local/bin/perl
#-----------------------------
# Writer : Mico Cheng
# Version: 2005012501
# Use for: accounting SPAM/VIRUS/TOTAL ratio for mx/ms/smtp
# Host : mx/ms/smtp
#-----------------------------
die "Usage: mico-statistics.pl <maillog file>\n" until ($#ARGV == 0 );
$mx_sa_tag2_level_deflt = 6.5;
$mx_sa_kill_level_deflt = 6.5;
$smtp_sa_tag2_level_deflt = 5.5;
$smtp_sa_kill_level_deflt = 8;
$maillog_file = $ARGV[0];$_ = $ARGV[0];
s/maillog/maildebug/;
$maildebug_file = $_;
($search_date) = ($maillog_file =~ /(200[5-9]\d{4})/);
($server_type) = ($maillog_file =~ /((smtp|mx|ms)\d*)/);
if ($server_type =~ 'smtp') {
$sa_tag2_level_deflt = $smtp_sa_tag2_level_deflt;
$sa_kill_level_deflt = $smtp_sa_kill_level_deflt;
} elsif ($server_type =~ 'mx') {
$sa_tag2_level_deflt = $mx_sa_tag2_level_deflt;
$sa_kill_level_deflt = $mx_sa_kill_level_deflt;
}
$server_type = uc($server_type).' Server';
# Generate SPAM/VIRUS/UNCHK/TOTAL count & Ratio
chomp($_ = `gzcat $maillog_file|grep ', Yes,'|wc -l`);
s/^\s+//;$SPAM_messages = $_;
chomp($_ = `gzcat $maillog_file|grep 'discarded,.*VIRUS'|wc -l`);
s/^\s+//;$VIRUS_messages = $_;
chomp($_ = `gzcat $maillog_file|grep 'message-id'|awk '{print \$10}'|sort|uniq|wc -l`);
s/^\s+//;$TOTAL_messages = $_;
chomp($_ = `gzcat $maillog_file|grep '\] connect'|egrep -v '127.0.0.1|210.200.211.61'|wc -l`);
s/^\s+//;$TOTAL_connections = $_;
chomp($_ = `gzcat $maildebug_file|grep 'not allowed'|wc -l`);
s/^\s+//;$BLOCKED_connections = $_;
$allowed_connections_ratio = ($TOTAL_connections-$BLOCKED_connections)/$TOTAL_connections;
$denied_connections_ratio = $BLOCKED_connections/$TOTAL_connections;
$spam_messages_ratio = $SPAM_messages/$TOTAL_messages;
$virus_messages_ratio = $VIRUS_messages/$TOTAL_messages;
# printing
printf "Date\t\t\t$search_date\n";
printf "Server Type\t\t$server_type\n";
printf "Spam Score Detection\t$sa_tag2_level_deflt\n\n";
printf ("Incoming Connections:\t%d Connections\n", $TOTAL_connections);
printf ("Allowed Connections:\t%d Connections\n", $TOTAL_connections-$BLOCKED_connections);
printf ("Blocked Connections:\t%d Connections\n\n", $BLOCKED_connections);
printf ("Incoming Messages:\t%d messages\n", $TOTAL_messages);
printf ("Clean Messages:\t\t%d messages\n", $TOTAL_messages-$SPAM_messages-$VIRUS_messages);
printf ("Spam Messages:\t\t%d messages\n", $SPAM_messages);
printf ("Virus Messages:\t\t%d messages\n\n\n", $VIRUS_messages);
printf (" Unhealth Connections Ratio\t%.2f%\n\n\n", $denied_connections_ratio*100);
printf (" Spam messages Ratio\t\t%.2f%\n", $spam_messages_ratio*100);
printf ("+) Virus messages Ratio\t\t%.2f%\n", $virus_messages_ratio*100);
printf (" ______________________________________\n");
printf (" Unhealth Messages Ratio\t%.2f%\n", $spam_messages_ratio*100+$virus_messages_ratio*100);
| 39.901408 | 99 | 0.702083 |
ed8c550201ed8c6601fcf1459befc2fbdb71b8d6 | 531 | pl | Perl | lib/unicore/lib/Ccc/BR.pl | Helmholtz-HIPS/prosnap | 5286cda39276d5eda85d2ddb23b8ab83c5d4960c | [
"MIT"
] | 15 | 2019-07-10T16:50:17.000Z | 2022-01-23T14:28:17.000Z | lib/unicore/lib/Ccc/BR.pl | Helmholtz-HIPS/prosnap | 5286cda39276d5eda85d2ddb23b8ab83c5d4960c | [
"MIT"
] | 98 | 2017-11-02T19:00:44.000Z | 2022-03-22T16:15:39.000Z | lib/unicore/lib/Ccc/BR.pl | Helmholtz-HIPS/prosnap | 5286cda39276d5eda85d2ddb23b8ab83c5d4960c | [
"MIT"
] | 9 | 2017-10-24T21:53:36.000Z | 2021-11-23T07:36:59.000Z | # !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by lib/unicore/mktables from the Unicode
# database, Version 8.0.0. Any changes made here will be lost!
# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only. The format and even the
# name or existence of this file are subject to change without notice. Don't
# use it directly. Use Unicode::UCD to access the Unicode character data
# base.
return <<'END';
V8
1434
1435
1453
1454
6457
6458
12333
12334
END
| 23.086957 | 77 | 0.693032 |
edc5bf76dc95847e6e350c35c6469d94bc1bc516 | 4,464 | pm | Perl | slic3r/linux/local-lib/lib/perl5/Wx/Perl/Packager/Linux.pm | pschou/py-sdf | 0a269ed155d026e29429d76666fb63c95d2b4b2c | [
"MIT"
] | null | null | null | slic3r/linux/local-lib/lib/perl5/Wx/Perl/Packager/Linux.pm | pschou/py-sdf | 0a269ed155d026e29429d76666fb63c95d2b4b2c | [
"MIT"
] | null | null | null | slic3r/linux/local-lib/lib/perl5/Wx/Perl/Packager/Linux.pm | pschou/py-sdf | 0a269ed155d026e29429d76666fb63c95d2b4b2c | [
"MIT"
] | null | null | null | ###################################################################################
# Distribution Wx::Perl::Packager
# File Wx/Perl/Packager/Linux.pm
# Description: module for Linux specific handlers
# File Revision: $Id: Linux.pm 48 2010-04-25 00:26:34Z $
# License: This program is free software; you can redistribute it and/or
# modify it under the same terms as Perl itself
# Copyright: Copyright (c) 2006 - 2010 Mark Dootson
###################################################################################
package Wx::Perl::Packager::Linux;
use strict;
use warnings;
require Wx::Perl::Packager::Base;
use base qw( Wx::Perl::Packager::Base );
our $VERSION = '0.27';
sub new {
my $class = shift;
my $self = $class->SUPER::new( @_ );
return $self;
}
sub get_core_modules { (qw( base core adv )) }
sub cleanup_on_exit {
1;
}
sub config_modules {
my $self = shift;
$self->get_modules->{wx} = { filename => 'wxmain.so', loaded => 0, libref => undef, missing_fatal => 0 };
$self->SUPER::config_modules;
}
sub config_system {
my $self = shift;
$self->set_so_module_suffix('.0'); # different linux dists symlink the .so libraries differently
# BAH. the loaders in Wx::Perl::Packager will look for
# modules ending in '.so' - If your modules get packaged
# differently, put the suffix here.
# e.g. if your module when packaged is
# wxlibs_gcc_base.so.0.6.0
# you should $self->set_so_module_suffix('.0.6.0')
$self->set_relocate_pdkcheck(0); # relocate the Wx dlls during PDK Check - never necessary it seems
$self->set_relocate_packaged(1); # relocate the Wx Dlls when running as PerlApp
$self->set_relocate_wx_main(1); # if set_relocate_packaged is true and we find 'wxmain.so'
# as a bound file, we load it as Wx.so ( which it should be
# if user as bound it). This is the current fix for PerlApp
# segmentation fault on exit in Linux. Makes no difference
# in MSWin
$self->set_unlink_relocated(1); # delete the extracted files - ensures relocated are loaded
$self->set_loadmode_pdkcheck('packload'); # standard | nullsub | packload during pdkcheck
# standard uses normal Wx loading
# nullsub - no extensions are loaded
# packload - extensions are loaded by Wx::Perl::Packager
$self->set_loadmode_packaged('packload');# as above, when running as PerlApp
$self->set_loadcore_pdkcheck(1); # use DynaLoader to load wx modules listed by
# get_core_modules method (below)during pdkcheck
$self->set_loadcore_packaged(1); # as above, when running as PerlApp
$self->set_unload_loaded_core(1);# unload any librefs we loaded
# (uses DynaLoader in an END block )
$self->set_unload_loaded_plugins(1); # unload plugins ( html, stc, gl .. etc) that are
# loaded via 'packload'. This seems to be necessary
# to ensure correct unloading order.
# Note - plugins are loaded using
# Wx::_load_plugin (not DynaLoader);
$self->set_pdkcheck_exit(1); # because of the current fault on exit in linux
# you can't package using PerlApp
# this setting calls 'exit(0)' after
# Wx has loaded.
# Drastic - but it is the current hack for this failure on linux
$self->set_pdkcheck_handle(1); # if true, use special handling during pdkcheck
# if false, treat as standard perl ( all other pdkcheck
# options are ignored)
$self->SUPER::config_system;
}
1;
| 45.55102 | 114 | 0.507392 |
edcb7677bb237311a14884f094d0770a581e1b71 | 1,281 | pm | Perl | local/lib/perl5/Date/Manip/TZ/inchri00.pm | jkb78/extrajnm | 6890e38e15f85ea9c09a141aa14affad0b8e91e7 | [
"MIT"
] | null | null | null | local/lib/perl5/Date/Manip/TZ/inchri00.pm | jkb78/extrajnm | 6890e38e15f85ea9c09a141aa14affad0b8e91e7 | [
"MIT"
] | null | null | null | local/lib/perl5/Date/Manip/TZ/inchri00.pm | jkb78/extrajnm | 6890e38e15f85ea9c09a141aa14affad0b8e91e7 | [
"MIT"
] | null | null | null | package #
Date::Manip::TZ::inchri00;
# Copyright (c) 2008-2015 Sullivan Beck. All rights reserved.
# This program is free software; you can redistribute it and/or modify it
# under the same terms as Perl itself.
# This file was automatically generated. Any changes to this file will
# be lost the next time 'tzdata' is run.
# Generated on: Wed Nov 25 11:33:48 EST 2015
# Data version: tzdata2015g
# Code version: tzcode2015g
# This module contains data from the zoneinfo time zone database. The original
# data was obtained from the URL:
# ftp://ftp.iana.org/tz
use strict;
use warnings;
require 5.010000;
our (%Dates,%LastRule);
END {
undef %Dates;
undef %LastRule;
}
our ($VERSION);
$VERSION='6.52';
END { undef $VERSION; }
%Dates = (
1 =>
[
[ [1,1,2,0,0,0],[1,1,2,7,2,52],'+07:02:52',[7,2,52],
'LMT',0,[1895,1,31,16,57,7],[1895,1,31,23,59,59],
'0001010200:00:00','0001010207:02:52','1895013116:57:07','1895013123:59:59' ],
],
1895 =>
[
[ [1895,1,31,16,57,8],[1895,1,31,23,57,8],'+07:00:00',[7,0,0],
'CXT',0,[9999,12,31,0,0,0],[9999,12,31,7,0,0],
'1895013116:57:08','1895013123:57:08','9999123100:00:00','9999123107:00:00' ],
],
);
%LastRule = (
);
1;
| 25.62 | 88 | 0.607338 |
73e8db6418fc8f12605e8782db8079b10ffd3f05 | 32,344 | pm | Perl | code/exiftool/lib/Image/ExifTool/DNG.pm | timlawrenz/depot-photos | a3958c938ddd66acd6f456daec5b31a17264778a | [
"Apache-2.0"
] | 91 | 2015-01-02T11:07:08.000Z | 2022-02-15T05:47:08.000Z | bin/lib/Image/ExifTool/DNG.pm | AndrewHaluza/exif-manager | 437c8a56ff9eb812c09f68518ad2e3b562942585 | [
"MIT"
] | 29 | 2015-08-02T02:46:08.000Z | 2022-03-11T09:52:01.000Z | bin/lib/Image/ExifTool/DNG.pm | AndrewHaluza/exif-manager | 437c8a56ff9eb812c09f68518ad2e3b562942585 | [
"MIT"
] | 18 | 2015-01-02T11:10:53.000Z | 2021-03-29T13:01:14.000Z | #------------------------------------------------------------------------------
# File: DNG.pm
#
# Description: Read DNG-specific information
#
# Revisions: 01/09/2006 - P. Harvey Created
#
# References: 1) http://www.adobe.com/products/dng/
#------------------------------------------------------------------------------
package Image::ExifTool::DNG;
use strict;
use vars qw($VERSION);
use Image::ExifTool qw(:DataAccess :Utils);
use Image::ExifTool::Exif;
use Image::ExifTool::MakerNotes;
use Image::ExifTool::CanonRaw;
$VERSION = '1.23';
sub ProcessOriginalRaw($$$);
sub ProcessAdobeData($$$);
sub ProcessAdobeMakN($$$);
sub ProcessAdobeCRW($$$);
sub ProcessAdobeRAF($$$);
sub ProcessAdobeMRW($$$);
sub ProcessAdobeSR2($$$);
sub ProcessAdobeIFD($$$);
sub WriteAdobeStuff($$$);
# data in OriginalRawFileData
%Image::ExifTool::DNG::OriginalRaw = (
GROUPS => { 2 => 'Image' },
PROCESS_PROC => \&ProcessOriginalRaw,
NOTES => q{
This table defines tags extracted from the DNG OriginalRawFileData
information.
},
0 => { Name => 'OriginalRawImage', Binary => 1 },
1 => { Name => 'OriginalRawResource', Binary => 1 },
2 => 'OriginalRawFileType',
3 => 'OriginalRawCreator',
4 => { Name => 'OriginalTHMImage', Binary => 1 },
5 => { Name => 'OriginalTHMResource', Binary => 1 },
6 => 'OriginalTHMFileType',
7 => 'OriginalTHMCreator',
);
%Image::ExifTool::DNG::AdobeData = ( #PH
GROUPS => { 0 => 'MakerNotes', 1 => 'AdobeDNG', 2 => 'Image' },
PROCESS_PROC => \&ProcessAdobeData,
WRITE_PROC => \&WriteAdobeStuff,
NOTES => q{
This information is found in the "Adobe" DNGPrivateData.
The maker notes ('MakN') are processed by ExifTool, but some information may
have been lost by the Adobe DNG Converter. This is because the Adobe DNG
Converter (as of version 6.3) doesn't properly handle information referenced
from inside the maker notes that lies outside the original maker notes
block. This information is lost when only the maker note block is copied to
the DNG image. While this doesn't effect all makes of cameras, it is a
problem for some major brands such as Olympus and Sony.
Other entries in this table represent proprietary information that is
extracted from the original RAW image and restructured to a different (but
still proprietary) Adobe format.
},
MakN => [ ], # (filled in later)
'CRW ' => {
Name => 'AdobeCRW',
SubDirectory => {
TagTable => 'Image::ExifTool::CanonRaw::Main',
ProcessProc => \&ProcessAdobeCRW,
WriteProc => \&WriteAdobeStuff,
},
},
'MRW ' => {
Name => 'AdobeMRW',
SubDirectory => {
TagTable => 'Image::ExifTool::MinoltaRaw::Main',
ProcessProc => \&ProcessAdobeMRW,
WriteProc => \&WriteAdobeStuff,
},
},
'SR2 ' => {
Name => 'AdobeSR2',
SubDirectory => {
TagTable => 'Image::ExifTool::Sony::SR2Private',
ProcessProc => \&ProcessAdobeSR2,
},
},
'RAF ' => {
Name => 'AdobeRAF',
SubDirectory => {
TagTable => 'Image::ExifTool::FujiFilm::RAF',
ProcessProc => \&ProcessAdobeRAF,
},
},
'Pano' => {
Name => 'AdobePano',
SubDirectory => {
TagTable => 'Image::ExifTool::PanasonicRaw::Main',
ProcessProc => \&ProcessAdobeIFD,
},
},
'Koda' => {
Name => 'AdobeKoda',
SubDirectory => {
TagTable => 'Image::ExifTool::Kodak::IFD',
ProcessProc => \&ProcessAdobeIFD,
},
},
'Leaf' => {
Name => 'AdobeLeaf',
SubDirectory => {
TagTable => 'Image::ExifTool::Leaf::SubIFD',
ProcessProc => \&ProcessAdobeIFD,
},
},
);
# fill in maker notes
{
my $tagInfo;
my $list = $Image::ExifTool::DNG::AdobeData{MakN};
foreach $tagInfo (@Image::ExifTool::MakerNotes::Main) {
unless (ref $tagInfo eq 'HASH') {
push @$list, $tagInfo;
next;
}
my %copy = %$tagInfo;
delete $copy{Groups};
delete $copy{GotGroups};
delete $copy{Table};
push @$list, \%copy;
}
}
#------------------------------------------------------------------------------
# Process DNG OriginalRawFileData information
# Inputs: 0) ExifTool object ref, 1) dirInfo ref, 2) tag table ref
# Returns: 1 on success, otherwise returns 0 and sets a Warning
sub ProcessOriginalRaw($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
my $dataPt = $$dirInfo{DataPt};
my $start = $$dirInfo{DirStart};
my $end = $start + $$dirInfo{DirLen};
my $pos = $start;
my ($index, $err);
SetByteOrder('MM'); # pointers are always big-endian in this structure
for ($index=0; $index<8; ++$index) {
last if $pos + 4 > $end;
my $val = Get32u($dataPt, $pos);
$val or $pos += 4, next; # ignore zero values
my $tagInfo = $et->GetTagInfo($tagTablePtr, $index);
$tagInfo or $err = "Missing DNG tag $index", last;
if ($index & 0x02) {
# extract a simple file type (tags 2, 3, 6 and 7)
$val = substr($$dataPt, $pos, 4);
$pos += 4;
} else {
# extract a compressed data block (tags 0, 1, 4 and 5)
my $n = int(($val + 65535) / 65536);
my $hdrLen = 4 * ($n + 2);
$pos + $hdrLen > $end and $err = '', last;
my $tag = $$tagInfo{Name};
# only extract this information if requested (because it takes time)
my $lcTag = lc $tag;
if (($$et{OPTIONS}{Binary} and not $$et{EXCL_TAG_LOOKUP}{$lcTag}) or
$$et{REQ_TAG_LOOKUP}{$lcTag})
{
unless (eval { require Compress::Zlib }) {
$err = 'Install Compress::Zlib to extract compressed images';
last;
}
my $i;
$val = '';
my $p2 = $pos + Get32u($dataPt, $pos + 4);
for ($i=0; $i<$n; ++$i) {
# inflate this compressed block
my $p1 = $p2;
$p2 = $pos + Get32u($dataPt, $pos + ($i + 2) * 4);
if ($p1 >= $p2 or $p2 > $end) {
$err = 'Bad compressed RAW image';
last;
}
my $buff = substr($$dataPt, $p1, $p2 - $p1);
my ($v2, $stat);
my $inflate = Compress::Zlib::inflateInit();
$inflate and ($v2, $stat) = $inflate->inflate($buff);
if ($inflate and $stat == Compress::Zlib::Z_STREAM_END()) {
$val .= $v2;
} else {
$err = 'Error inflating compressed RAW image';
last;
}
}
$pos = $p2;
} else {
$pos + $hdrLen > $end and $err = '', last;
my $len = Get32u($dataPt, $pos + $hdrLen - 4);
$pos + $len > $end and $err = '', last;
$val = substr($$dataPt, $pos + $hdrLen, $len - $hdrLen);
$val = "Binary data $len bytes";
$pos += $len; # skip over this block
}
}
$et->FoundTag($tagInfo, $val);
}
$et->Warn($err || 'Bad OriginalRawFileData') if defined $err;
return 1;
}
#------------------------------------------------------------------------------
# Process Adobe DNGPrivateData directory
# Inputs: 0) ExifTool object ref, 1) dirInfo ref, 2) tag table ref
# Returns: 1 on success
sub ProcessAdobeData($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
my $dataPt = $$dirInfo{DataPt};
my $dataPos = $$dirInfo{DataPos};
my $pos = $$dirInfo{DirStart};
my $end = $$dirInfo{DirLen} + $pos;
my $outfile = $$dirInfo{OutFile};
my $verbose = $et->Options('Verbose');
my $htmlDump = $et->Options('HtmlDump');
return 0 unless $$dataPt =~ /^Adobe\0/;
unless ($outfile) {
$et->VerboseDir($dirInfo);
# don't parse makernotes if FastScan > 1
my $fast = $et->Options('FastScan');
return 1 if $fast and $fast > 1;
}
$htmlDump and $et->HDump($dataPos, 6, 'Adobe DNGPrivateData header');
SetByteOrder('MM'); # always big endian
$pos += 6;
while ($pos + 8 <= $end) {
my ($tag, $size) = unpack("x${pos}a4N", $$dataPt);
$pos += 8;
last if $pos + $size > $end;
my $tagInfo = $$tagTablePtr{$tag};
if ($htmlDump) {
my $name = "Adobe$tag";
$name =~ tr/ //d;
$et->HDump($dataPos + $pos - 8, 8, "$name header", "Data Size: $size bytes");
# dump non-EXIF format data
unless ($tag =~ /^(MakN|SR2 )$/) {
$et->HDump($dataPos + $pos, $size, "$name data");
}
}
if ($verbose and not $outfile) {
$tagInfo or $et->VPrint(0, "$$et{INDENT}Unsupported DNGAdobeData record: ($tag)\n");
$et->VerboseInfo($tag,
ref $tagInfo eq 'HASH' ? $tagInfo : undef,
DataPt => $dataPt,
DataPos => $dataPos,
Start => $pos,
Size => $size,
);
}
my $value;
while ($tagInfo) {
my ($subTable, $subName, $processProc);
if (ref $tagInfo eq 'HASH') {
unless ($$tagInfo{SubDirectory}) {
if ($outfile) {
# copy value across to outfile
$value = substr($$dataPt, $pos, $size);
} else {
$et->HandleTag($tagTablePtr, $tag, substr($$dataPt, $pos, $size));
}
last;
}
$subTable = GetTagTable($tagInfo->{SubDirectory}->{TagTable});
$subName = $$tagInfo{Name};
$processProc = $tagInfo->{SubDirectory}->{ProcessProc};
} else {
$subTable = $tagTablePtr;
$subName = 'AdobeMakN';
$processProc = \&ProcessAdobeMakN;
}
my %dirInfo = (
Base => $$dirInfo{Base},
DataPt => $dataPt,
DataPos => $dataPos,
DataLen => $$dirInfo{DataLen},
DirStart => $pos,
DirLen => $size,
DirName => $subName,
);
if ($outfile) {
$dirInfo{Proc} = $processProc; # WriteAdobeStuff() calls this to do the actual writing
$value = $et->WriteDirectory(\%dirInfo, $subTable, \&WriteAdobeStuff);
# use old directory if an error occurred
defined $value or $value = substr($$dataPt, $pos, $size);
} else {
# override process proc for MakN
$et->ProcessDirectory(\%dirInfo, $subTable, $processProc);
}
last;
}
if (defined $value and length $value) {
# add "Adobe" header if necessary
$$outfile = "Adobe\0" unless $$outfile and length $$outfile;
$$outfile .= $tag . pack('N', length $value) . $value;
$$outfile .= "\0" if length($value) & 0x01; # pad if necessary
}
$pos += $size;
++$pos if $size & 0x01; # (darn padding)
}
$pos == $end or $et->Warn("$pos $end Adobe private data is corrupt");
return 1;
}
#------------------------------------------------------------------------------
# Process Adobe CRW directory
# Inputs: 0) ExifTool object ref, 1) dirInfo ref, 2) tag table ref
# Returns: 1 on success, otherwise returns 0 and sets a Warning
# Notes: data has 4 byte header (2 for byte order and 2 for entry count)
# - this routine would be as simple as ProcessAdobeMRW() below if Adobe hadn't
# pulled the bonehead move of reformatting the CRW information
sub ProcessAdobeCRW($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
my $dataPt = $$dirInfo{DataPt};
my $start = $$dirInfo{DirStart};
my $end = $start + $$dirInfo{DirLen};
my $verbose = $et->Options('Verbose');
my $buildMakerNotes = $et->Options('MakerNotes');
my $outfile = $$dirInfo{OutFile};
my ($newTags, $oldChanged);
SetByteOrder('MM'); # always big endian
return 0 if $$dirInfo{DirLen} < 4;
my $byteOrder = substr($$dataPt, $start, 2);
return 0 unless $byteOrder =~ /^(II|MM)$/;
# initialize maker note data if building maker notes
$buildMakerNotes and Image::ExifTool::CanonRaw::InitMakerNotes($et);
my $entries = Get16u($dataPt, $start + 2);
my $pos = $start + 4;
$et->VerboseDir($dirInfo, $entries) unless $outfile;
if ($outfile) {
# get hash of new tags
$newTags = $et->GetNewTagInfoHash($tagTablePtr);
$$outfile = substr($$dataPt, $start, 4);
$oldChanged = $$et{CHANGED};
}
# loop through entries in Adobe CRW information
my $index;
for ($index=0; $index<$entries; ++$index) {
last if $pos + 6 > $end;
my $tag = Get16u($dataPt, $pos);
my $size = Get32u($dataPt, $pos + 2);
$pos += 6;
last if $pos + $size > $end;
my $value = substr($$dataPt, $pos, $size);
my $tagID = $tag & 0x3fff;
my $tagType = ($tag >> 8) & 0x38; # get tag type
my $format = $Image::ExifTool::CanonRaw::crwTagFormat{$tagType};
my $count;
my $tagInfo = $et->GetTagInfo($tagTablePtr, $tagID, \$value);
if ($tagInfo) {
$format = $$tagInfo{Format} if $$tagInfo{Format};
$count = $$tagInfo{Count};
}
# set count to 1 by default for values that were in the directory entry
if (not defined $count and $tag & 0x4000 and $format and $format ne 'string') {
$count = 1;
}
# set count from tagInfo count if necessary
if ($format and not $count) {
# set count according to format and size
my $fnum = $Image::ExifTool::Exif::formatNumber{$format};
my $fsiz = $Image::ExifTool::Exif::formatSize[$fnum];
$count = int($size / $fsiz);
}
$format or $format = 'undef';
SetByteOrder($byteOrder);
my $val = ReadValue(\$value, 0, $format, $count, $size);
if ($outfile) {
if ($tagInfo) {
my $subdir = $$tagInfo{SubDirectory};
if ($subdir and $$subdir{TagTable}) {
my $name = $$tagInfo{Name};
my $newTagTable = GetTagTable($$subdir{TagTable});
return 0 unless $newTagTable;
my $subdirStart = 0;
#### eval Start ()
$subdirStart = eval $$subdir{Start} if $$subdir{Start};
my $dirData = \$value;
my %subdirInfo = (
Name => $name,
DataPt => $dirData,
DataLen => $size,
DirStart => $subdirStart,
DirLen => $size - $subdirStart,
Parent => $$dirInfo{DirName},
);
#### eval Validate ($dirData, $subdirStart, $size)
if (defined $$subdir{Validate} and not eval $$subdir{Validate}) {
$et->Warn("Invalid $name data");
} else {
$subdir = $et->WriteDirectory(\%subdirInfo, $newTagTable);
if (defined $subdir and length $subdir) {
if ($subdirStart) {
# add header before data directory
$value = substr($value, 0, $subdirStart) . $subdir;
} else {
$value = $subdir;
}
}
}
} elsif ($$newTags{$tagID}) {
my $nvHash = $et->GetNewValueHash($tagInfo);
if ($et->IsOverwriting($nvHash, $val)) {
my $newVal = $et->GetNewValue($nvHash);
my $verboseVal;
$verboseVal = $newVal if $verbose > 1;
# convert to specified format if necessary
if (defined $newVal and $format) {
$newVal = WriteValue($newVal, $format, $count);
}
if (defined $newVal) {
$et->VerboseValue("- CanonRaw:$$tagInfo{Name}", $value);
$et->VerboseValue("+ CanonRaw:$$tagInfo{Name}", $verboseVal);
$value = $newVal;
++$$et{CHANGED};
}
}
}
}
# write out new value (always big-endian)
SetByteOrder('MM');
# (verified that there is no padding here)
$$outfile .= Set16u($tag) . Set32u(length($value)) . $value;
} else {
$et->HandleTag($tagTablePtr, $tagID, $val,
Index => $index,
DataPt => $dataPt,
DataPos => $$dirInfo{DataPos},
Start => $pos,
Size => $size,
TagInfo => $tagInfo,
);
if ($buildMakerNotes) {
# build maker notes information if requested
Image::ExifTool::CanonRaw::BuildMakerNotes($et, $tagID, $tagInfo,
\$value, $format, $count);
}
}
# (we lost the directory structure, but the second tag 0x0805
# should be in the ImageDescription directory)
$$et{DIR_NAME} = 'ImageDescription' if $tagID == 0x0805;
SetByteOrder('MM');
$pos += $size;
}
if ($outfile and (not defined $$outfile or $index != $entries or
$$et{CHANGED} == $oldChanged))
{
$$et{CHANGED} = $oldChanged; # nothing changed
undef $$outfile; # rewrite old directory
}
if ($index != $entries) {
$et->Warn('Truncated CRW notes');
} elsif ($pos < $end) {
$et->Warn($end-$pos . ' extra bytes at end of CRW notes');
}
# finish building maker notes if necessary
if ($buildMakerNotes) {
SetByteOrder($byteOrder);
Image::ExifTool::CanonRaw::SaveMakerNotes($et);
}
return 1;
}
#------------------------------------------------------------------------------
# Process Adobe MRW directory
# Inputs: 0) ExifTool object ref, 1) dirInfo ref, 2) tag table ref
# Returns: 1 on success, otherwise returns 0 and sets a Warning
# Notes: data has 4 byte header (2 for byte order and 2 for entry count)
sub ProcessAdobeMRW($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
my $dataPt = $$dirInfo{DataPt};
my $dirLen = $$dirInfo{DirLen};
my $dirStart = $$dirInfo{DirStart};
my $outfile = $$dirInfo{OutFile};
# construct fake MRW file
my $buff = "\0MRM" . pack('N', $dirLen - 4);
# ignore leading byte order and directory count words
$buff .= substr($$dataPt, $dirStart + 4, $dirLen - 4);
my $raf = new File::RandomAccess(\$buff);
my %dirInfo = ( RAF => $raf, OutFile => $outfile );
my $rtnVal = Image::ExifTool::MinoltaRaw::ProcessMRW($et, \%dirInfo);
if ($outfile and defined $$outfile and length $$outfile) {
# remove MRW header and add Adobe header
$$outfile = substr($$dataPt, $dirStart, 4) . substr($$outfile, 8);
}
return $rtnVal;
}
#------------------------------------------------------------------------------
# Process Adobe RAF directory
# Inputs: 0) ExifTool object ref, 1) dirInfo ref, 2) tag table ref
# Returns: 1 on success, otherwise returns 0 and sets a Warning
sub ProcessAdobeRAF($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
return 0 if $$dirInfo{OutFile}; # (can't write this yet)
my $dataPt = $$dirInfo{DataPt};
my $pos = $$dirInfo{DirStart};
my $dirEnd = $$dirInfo{DirLen} + $pos;
my ($readIt, $warn);
# set byte order according to first 2 bytes of Adobe RAF data
if ($pos + 2 <= $dirEnd and SetByteOrder(substr($$dataPt, $pos, 2))) {
$pos += 2;
} else {
$et->Warn('Invalid DNG RAF data');
return 0;
}
$et->VerboseDir($dirInfo);
# make fake RAF object for processing (same acronym, different meaning)
my $raf = new File::RandomAccess($dataPt);
my $num = '';
# loop through all records in Adobe RAF data:
# 0 - RAF table (not processed)
# 1 - first RAF directory
# 2 - second RAF directory (if available)
for (;;) {
last if $pos + 4 > $dirEnd;
my $len = Get32u($dataPt, $pos);
$pos += 4 + $len; # step to next entry in Adobe RAF record
$len or last; # ends with an empty entry
$readIt or $readIt = 1, next; # ignore first entry (RAF table)
my %dirInfo = (
RAF => $raf,
DirStart => $pos - $len,
);
$$et{SET_GROUP1} = "RAF$num";
$et->ProcessDirectory(\%dirInfo, $tagTablePtr) or $warn = 1;
delete $$et{SET_GROUP1};
$num = ($num || 1) + 1;
}
$warn and $et->Warn('Possibly corrupt RAF information');
return 1;
}
#------------------------------------------------------------------------------
# Process Adobe SR2 directory
# Inputs: 0) ExifTool object ref, 1) dirInfo ref, 2) tag table ref
# Returns: 1 on success, otherwise returns 0 and sets a Warning
# Notes: data has 6 byte header (2 for byte order and 4 for original offset)
sub ProcessAdobeSR2($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
return 0 if $$dirInfo{OutFile}; # (can't write this yet)
my $dataPt = $$dirInfo{DataPt};
my $start = $$dirInfo{DirStart};
my $len = $$dirInfo{DirLen};
return 0 if $len < 6;
SetByteOrder('MM');
my $originalPos = Get32u($dataPt, $start + 2);
return 0 unless SetByteOrder(substr($$dataPt, $start, 2));
$et->VerboseDir($dirInfo);
my $dataPos = $$dirInfo{DataPos};
my $dirStart = $start + 6; # pointer to maker note directory
my $dirLen = $len - 6;
# initialize subdirectory information
my $fix = $dataPos + $dirStart - $originalPos;
my %subdirInfo = (
DirName => 'AdobeSR2',
Base => $$dirInfo{Base} + $fix,
DataPt => $dataPt,
DataPos => $dataPos - $fix,
DataLen => $$dirInfo{DataLen},
DirStart => $dirStart,
DirLen => $dirLen,
Parent => $$dirInfo{DirName},
);
if ($et->Options('HtmlDump')) {
$et->HDump($dataPos + $start, 6, 'Adobe SR2 data');
}
# parse the SR2 directory
$et->ProcessDirectory(\%subdirInfo, $tagTablePtr);
return 1;
}
#------------------------------------------------------------------------------
# Process Adobe-mutilated IFD directory
# Inputs: 0) ExifTool object ref, 1) dirInfo ref, 2) tag table ref
# Returns: 1 on success, otherwise returns 0 and sets a Warning
# Notes: data has 2 byte header (byte order of the data)
sub ProcessAdobeIFD($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
return 0 if $$dirInfo{OutFile}; # (can't write this yet)
my $dataPt = $$dirInfo{DataPt};
my $pos = $$dirInfo{DirStart};
my $dataPos = $$dirInfo{DataPos};
return 0 if $$dirInfo{DirLen} < 4;
my $dataOrder = substr($$dataPt, $pos, 2);
return 0 unless SetByteOrder($dataOrder); # validate byte order of data
# parse the mutilated IFD. This is similar to a TIFF IFD, except:
# - data follows directly after Count entry in IFD
# - byte order of IFD entries is always big-endian, but byte order of data changes
SetByteOrder('MM'); # IFD structure is always big-endian
my $entries = Get16u($dataPt, $pos + 2);
$et->VerboseDir($dirInfo, $entries);
$pos += 4;
my $end = $pos + $$dirInfo{DirLen};
my $index;
for ($index=0; $index<$entries; ++$index) {
last if $pos + 8 > $end;
SetByteOrder('MM'); # directory entries always big-endian (doh!)
my $tagID = Get16u($dataPt, $pos);
my $format = Get16u($dataPt, $pos+2);
my $count = Get32u($dataPt, $pos+4);
if ($format < 1 or $format > 13) {
# warn unless the IFD was just padded with zeros
$format and $et->Warn(
sprintf("Unknown format ($format) for $$dirInfo{DirName} tag 0x%x",$tagID));
return 0; # must be corrupted
}
my $size = $Image::ExifTool::Exif::formatSize[$format] * $count;
last if $pos + 8 + $size > $end;
my $formatStr = $Image::ExifTool::Exif::formatName[$format];
SetByteOrder($dataOrder); # data stored in native order
my $val = ReadValue($dataPt, $pos + 8, $formatStr, $count, $size);
$et->HandleTag($tagTablePtr, $tagID, $val,
Index => $index,
DataPt => $dataPt,
DataPos => $dataPos,
Start => $pos + 8,
Size => $size
);
$pos += 8 + $size;
}
if ($index < $entries) {
$et->Warn("Truncated $$dirInfo{DirName} directory");
return 0;
}
return 1;
}
#------------------------------------------------------------------------------
# Process Adobe MakerNotes directory
# Inputs: 0) ExifTool object ref, 1) dirInfo ref, 2) tag table ref
# Returns: 1 on success, otherwise returns 0 and sets a Warning
# Notes: data has 6 byte header (2 for byte order and 4 for original offset)
# --> or 18 bytes for DNG converted from JPG by Adobe Camera Raw!
sub ProcessAdobeMakN($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
my $dataPt = $$dirInfo{DataPt};
my $start = $$dirInfo{DirStart};
my $len = $$dirInfo{DirLen};
my $outfile = $$dirInfo{OutFile};
return 0 if $len < 6;
SetByteOrder('MM');
my $originalPos = Get32u($dataPt, $start + 2);
return 0 unless SetByteOrder(substr($$dataPt, $start, 2));
$et->VerboseDir($dirInfo) unless $outfile;
my $dataPos = $$dirInfo{DataPos};
my $hdrLen = 6;
# hack for extra 12 bytes in MakN header of JPEG converted to DNG by Adobe Camera Raw
# (4 bytes "00 00 00 01" followed by 8 unknown bytes)
$hdrLen += 12 if $len >= 18 and substr($$dataPt, $start+6, 4) eq "\0\0\0\x01";
my $dirStart = $start + $hdrLen; # pointer to maker note directory
my $dirLen = $len - $hdrLen;
my $hdr = substr($$dataPt, $dirStart, $dirLen < 48 ? $dirLen : 48);
my $tagInfo = $et->GetTagInfo($tagTablePtr, 'MakN', \$hdr);
return 0 unless $tagInfo and $$tagInfo{SubDirectory};
my $subdir = $$tagInfo{SubDirectory};
my $subTable = GetTagTable($$subdir{TagTable});
# initialize subdirectory information
my %subdirInfo = (
DirName => 'MakerNotes',
Name => $$tagInfo{Name}, # needed for maker notes verbose dump
Base => $$dirInfo{Base},
DataPt => $dataPt,
DataPos => $dataPos,
DataLen => $$dirInfo{DataLen},
DirStart => $dirStart,
DirLen => $dirLen,
TagInfo => $tagInfo,
FixBase => $$subdir{FixBase},
EntryBased=> $$subdir{EntryBased},
Parent => $$dirInfo{DirName},
);
# look for start of maker notes IFD
my $loc = Image::ExifTool::MakerNotes::LocateIFD($et,\%subdirInfo);
unless (defined $loc) {
$et->Warn('Maker notes could not be parsed');
return 0;
}
if ($et->Options('HtmlDump')) {
$et->HDump($dataPos + $start, $hdrLen, 'Adobe MakN data');
$et->HDump($dataPos + $dirStart, $loc, "$$tagInfo{Name} header") if $loc;
}
my $fix = 0;
unless ($$subdir{Base}) {
# adjust base offset for current maker note position
$fix = $dataPos + $dirStart - $originalPos;
$subdirInfo{Base} += $fix;
$subdirInfo{DataPos} -= $fix;
}
if ($outfile) {
# rewrite the maker notes directory
my $fixup = $subdirInfo{Fixup} = new Image::ExifTool::Fixup;
my $oldChanged = $$et{CHANGED};
my $buff = $et->WriteDirectory(\%subdirInfo, $subTable);
# nothing to do if error writing directory or nothing changed
unless (defined $buff and $$et{CHANGED} != $oldChanged) {
$$et{CHANGED} = $oldChanged;
return 1;
}
# deleting maker notes if directory is empty
unless (length $buff) {
$$outfile = '';
return 1;
}
# apply a one-time fixup to offsets
if ($subdirInfo{Relative}) {
# shift all offsets to be relative to new base
my $baseShift = $dataPos + $dirStart + $$dirInfo{Base} - $subdirInfo{Base};
$fixup->{Shift} += $baseShift;
} else {
# shift offsets to position of original maker notes
$fixup->{Shift} += $originalPos;
}
# if we wrote the directory as a block the header is already included
$loc = 0 if $subdirInfo{BlockWrite};
$fixup->{Shift} += $loc; # adjust for makernotes header
$fixup->ApplyFixup(\$buff); # fix up pointer offsets
# get copy of original Adobe header (6 or 18) and makernotes header ($loc)
my $header = substr($$dataPt, $start, $hdrLen + $loc);
# add Adobe and makernotes headers to new directory
$$outfile = $header . $buff;
} else {
# parse the maker notes directory
$et->ProcessDirectory(\%subdirInfo, $subTable, $$subdir{ProcessProc});
# extract maker notes as a block if specified
if ($et->Options('MakerNotes') or
$$et{REQ_TAG_LOOKUP}{lc($$tagInfo{Name})})
{
my $val;
if ($$tagInfo{MakerNotes}) {
$subdirInfo{Base} = $$dirInfo{Base} + $fix;
$subdirInfo{DataPos} = $dataPos - $fix;
$subdirInfo{DirStart} = $dirStart;
$subdirInfo{DirLen} = $dirLen;
# rebuild the maker notes to identify all offsets that require fixing up
$val = Image::ExifTool::Exif::RebuildMakerNotes($et, \%subdirInfo, $subTable);
if (not defined $val and $dirLen > 4) {
$et->Warn('Error rebuilding maker notes (may be corrupt)');
}
} else {
# extract this directory as a block if specified
return 1 unless $$tagInfo{Writable};
}
$val = substr($$dataPt, 20) unless defined $val;
$et->FoundTag($tagInfo, $val);
}
}
return 1;
}
#------------------------------------------------------------------------------
# Write Adobe information (calls appropriate ProcessProc to do the actual work)
# Inputs: 0) ExifTool object ref, 1) source dirInfo ref, 2) tag table ref
# Returns: new data block (may be empty if directory is deleted) or undef on error
sub WriteAdobeStuff($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
$et or return 1; # allow dummy access
my $proc = $$dirInfo{Proc} || \&ProcessAdobeData;
my $buff;
$$dirInfo{OutFile} = \$buff;
&$proc($et, $dirInfo, $tagTablePtr) or undef $buff;
return $buff;
}
1; # end
__END__
=head1 NAME
Image::ExifTool::DNG.pm - Read DNG-specific information
=head1 SYNOPSIS
This module is used by Image::ExifTool
=head1 DESCRIPTION
This module contains routines required by Image::ExifTool to process
information in DNG (Digital Negative) images.
=head1 AUTHOR
Copyright 2003-2021, Phil Harvey (philharvey66 at gmail.com)
This library is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
=head1 REFERENCES
=over 4
=item L<http://www.adobe.com/products/dng/>
=back
=head1 SEE ALSO
L<Image::ExifTool::TagNames/DNG Tags>,
L<Image::ExifTool::TagNames/EXIF Tags>,
L<Image::ExifTool(3pm)|Image::ExifTool>
=cut
| 38.367734 | 103 | 0.519478 |
ed768474285abe3e4015946acfb0a41e209e61f0 | 5,720 | pm | Perl | src/perl/hitperl/mnimesh.pm | JulichBrainAtlas/HICoreTools | 2d89d8e83eb54eaf2c58c31abbffce0567c3e784 | [
"Apache-2.0"
] | null | null | null | src/perl/hitperl/mnimesh.pm | JulichBrainAtlas/HICoreTools | 2d89d8e83eb54eaf2c58c31abbffce0567c3e784 | [
"Apache-2.0"
] | null | null | null | src/perl/hitperl/mnimesh.pm | JulichBrainAtlas/HICoreTools | 2d89d8e83eb54eaf2c58c31abbffce0567c3e784 | [
"Apache-2.0"
] | null | null | null | ## hitperl::mnimesh package
########################################################################################################
### >>>
package hitperl::mnimesh;
### >>>
use hitperl;
use File::Path;
use Exporter;
@ISA = ('Exporter');
@EXPORT = ( 'loadFile', 'loadMNIObjFile', 'loadVertexInfoTextFile', 'saveMNIObjFile', );
$VERSION = 0.1;
#### local variables
my $timestamp = sprintf "%06x",int(rand(100000));
my $tmp = "tmp".$timestamp;
#### start public modules
### load mni obj file
# data are stored in a data hash
sub _cleanString {
my $string = shift;
$string =~ s/^\s+//g;
$string =~ s/\s+$//g;
return $string;
}
### >>>
sub loadFile {
my ($filename,$verbose,$debug) = @_;
print " *** WARNING: 'mnimesh::loadFile()' Code has changed ***\n";
my %data = ();
print "mnimesh.loadFile(): Loading mni obj file '".$filename."'...\n" if ( $verbose );
open(FPin,"<$filename") || die "FATAL ERROR: Cannot open mni obj file '".$filename."' for reading: $!";
my $xmin = 1000000000;
my $ymin = $zmin = $xmin;
my $xmax = $ymax = $zmax = -$xmin;
my $tmpline = "";
my $headerline = <FPin>;
die "mnimesh.loadFile(): FATAL ERROR: line=$headerline - invalid syntax in file '".$infile."': $!" unless ( $headerline =~ m/^P/ );
chomp($headerline);
my @headervalues = split(/ /,$headerline);
$nvertices = $headervalues[-1];
print " + loading ".$nvertices." vertices...\n" if ( $verbose );
my @vertices = ();
for ( my $nv=0 ; $nv<$nvertices ; $nv++ ) {
my $vertexline = <FPin>;
chomp($vertexline);
$vertexline = _cleanString($vertexline);
my @thevertices = split(/\ /,$vertexline);
$xmin = $thevertices[0] if ( $thevertices[0]<$xmin );
$xmax = $thevertices[0] if ( $thevertices[0]>$xmax );
$ymin = $thevertices[1] if ( $thevertices[1]<$ymin );
$ymax = $thevertices[1] if ( $thevertices[1]>$ymax );
$zmin = $thevertices[2] if ( $thevertices[2]<$zmin );
$zmax = $thevertices[2] if ( $thevertices[2]>$zmax );
push(@vertices,@thevertices);
# print "DEBUG: vertex = $vertexline\n";
}
$data{"nvertices"} = $nvertices;
@{$data{"vertices"}} = @vertices;
$tmpline = <FPin>;
print " + loading ".$nvertices." normals...\n" if ( $verbose );
my @normals = ();
for ( my $nv=0 ; $nv<$nvertices ; $nv++ ) {
my $normalline = <FPin>;
chomp($normalline);
$normalline = _cleanString($normalline);
push(@normals,split(/\ /,$normalline));
# print "DEBUG: normals = $normalline\n";
}
@{$data{"normals"}} = @normals;
$tmpline = <FPin>;
my $simplexdim = <FPin>;
chomp($simplexdim);
$simplexdim = _cleanString($simplexdim);
$data{"nfaces"} = $simplexdim;
$tmpline = <FPin>;
$tmpline = <FPin>;
my @simplices = ();
if ( $verbose ) {
print " + loading ".$simplexdim." simplices...\n";
print " + parsing simplex dimension info lines...\n";
}
while ( <FPin> ) {
my $datastring = _cleanString($_);
last if ( length($datastring)==0 );
}
print " + parsing simplex info...\n" if ( $verbose );
while ( <FPin> ) {
push(@simplices,split(/\ /,_cleanString($_)));
}
@{$data{"simplices"}} = @simplices;
close(FPin);
print " got ".(scalar(@{$data{"simplices"}})/3)." simplices.\n" if ( $verbose );
@{$data{"range"}} = ($xmin,$xmax,$ymin,$ymax,$zmin,$zmax);
return %data;
}
sub loadMNIObjFile {
my ($filename,$verbose,$debug) = @_;
return loadFile($filename,$verbose,$debug);
}
###
sub loadVertexInfoTextFile {
my ($filename,$verbose) = @_;
my @values = ();
print "mnimesh.loadVertexInfoTextFile(): Loading vertex info file '".$filename."'...\n" if ( $verbose );
open(FPin,"<$filename") || die "FATAL ERROR: Cannot open vertex info file '".$filename."' for reading: $!";
while ( <FPin> ) {
chomp($_);
push(@values,$_);
}
close(FPin);
print " > got values for ".@values." vertices!\n" if ( $verbose );
return @values;
}
###
sub saveFile {
my ($filename,$meshdata_ptr,$verbose,$debug) = @_;
my %meshdata = %{$meshdata_ptr};
my @vertices = @{$meshdata{"vertices"}};
my @normals = @{$meshdata{"normals"}};
my @simplices = @{$meshdata{"simplices"}};
my $nvertices = $meshdata{"nvertices"};
my $nfaces = $meshdata{"nfaces"};
open(FPout,">$filename") || die "FATAL ERROR: Cannot save mni obj file '".$filename."': $!";
print FPout "P 0.3 0.3 0.4 10 1 ".$nvertices."\n"; ### aspect ???????
for ( my $i=0 ; $i<(3*$nvertices) ; $i+=3 ) {
print FPout " ".$vertices[$i]." ".$vertices[$i+1]." ".$vertices[$i+2]."\n";
}
print FPout "\n";
for ( my $i=0 ; $i<(3*$nvertices) ; $i+=3 ) {
print FPout " ".$normals[$i]." ".$normals[$i+1]." ".$normals[$i+2]."\n";
}
print FPout "\n";
print FPout $nfaces."\n";
print FPout "0 1 1 1 1\n";
print FPout "\n";
my $nelements = 8;
# save simplex topo info
my $kk = 3;
my $n3faces = 3*$nfaces;
for ( my $i=0 ; $i<$nfaces ; $i+=$nelements ) {
my $dataline = "";
for ( my $k=0 ; ($k<$nelements && $kk<=$n3faces); $k++ ) {
$dataline .= " ".$kk;
$kk += 3;
}
print FPout $dataline."\n";
}
print FPout "\n";
# save simplex info
for ( my $i=0 ; $i<(3*$nfaces) ; $i+=$nelements ) {
my $dataline = "";
for ( my $k=0 ; $k<$nelements ; $k++ ) {
$dataline .= " ".$simplices[$i+$k];
}
print FPout $dataline."\n"; ## if ( $k<$nelements-1 );
}
close(FPout);
return 1;
}
sub saveMNIObjFile {
my ($filename,$meshdata_ptr,$verbose,$debug) = @_;
return saveFile($filename,$meshdata_ptr,$verbose,$debug);
}
#### end of modules
sub _debug { warn "@_\n" if $DEBUG; }
### return value (required to evaluate to TRUE)
1;
| 32.134831 | 134 | 0.556469 |
73ea96d85acd38824de8626f770ca26cccc98c39 | 982 | pm | Perl | modules/scripting/perl/lib/Atheme/Internal/Hooklist.pm | spb/atheme | db13ae4262c94b65a6d254e6f9b09d8bfe03f79d | [
"0BSD"
] | 6 | 2021-06-24T00:10:30.000Z | 2021-11-05T13:38:36.000Z | modules/scripting/perl/lib/Atheme/Internal/Hooklist.pm | spb/atheme | db13ae4262c94b65a6d254e6f9b09d8bfe03f79d | [
"0BSD"
] | null | null | null | modules/scripting/perl/lib/Atheme/Internal/Hooklist.pm | spb/atheme | db13ae4262c94b65a6d254e6f9b09d8bfe03f79d | [
"0BSD"
] | 3 | 2021-05-02T17:09:37.000Z | 2021-05-02T17:10:08.000Z | package Atheme::Internal::Hooklist;
use strict;
use warnings;
use Atheme::Hooks;
use Carp;
sub new {
my ($class, $hookname) = @_;
$class = ref $class || $class;
croak "Tried to construct a hook list without a name" unless $hookname;
return bless { name => $hookname, hooks => [] }, $class;
}
sub add_hook {
my ($self, $hook) = @_;
my ($caller) = caller;
$Atheme::Hooks::hooks_by_package{$caller} ||= [];
push @{$Atheme::Hooks::hooks_by_package{$caller}}, { list => $self, hook => $hook };
if (scalar @{$self->{hooks}} == 0) {
enable_perl_hook_handler($self->{name})
}
push @{$self->{hooks}}, $hook;
}
sub del_hook {
my ($self, $hook) = @_;
my @newhooks;
foreach my $h (@{$self->{hooks}}) {
push @newhooks, $h unless $h == $hook;
}
if (scalar @newhooks == 0) {
disable_perl_hook_handler($self->{name});
}
$self->{hooks} = \@newhooks;
}
sub call_hooks {
my ($self, $arg) = @_;
foreach my $hook (@{$self->{hooks}}) {
$hook->($arg);
}
}
1;
| 17.22807 | 85 | 0.593686 |
edd2511d6191852140e086e43426cee84c354da5 | 540 | pm | Perl | lib/Mouse/Meta/Role/Method.pm | clayne/p5-Mouse | 963c3a248fd786f1d38f297d7a79705a29833ebc | [
"Artistic-1.0"
] | 21 | 2015-05-13T04:45:53.000Z | 2019-07-25T09:43:23.000Z | lib/Mouse/Meta/Role/Method.pm | clayne/p5-Mouse | 963c3a248fd786f1d38f297d7a79705a29833ebc | [
"Artistic-1.0"
] | 48 | 2015-01-19T11:01:58.000Z | 2019-08-13T09:48:13.000Z | lib/Mouse/Meta/Role/Method.pm | clayne/p5-Mouse | 963c3a248fd786f1d38f297d7a79705a29833ebc | [
"Artistic-1.0"
] | 20 | 2015-03-02T04:21:52.000Z | 2019-08-14T03:02:00.000Z | package Mouse::Meta::Role::Method;
use Mouse::Util; # enables strict and warnings
use Mouse::Meta::Method;
our @ISA = qw(Mouse::Meta::Method);
sub _new{
my($class, %args) = @_;
my $self = bless \%args, $class;
if($class ne __PACKAGE__){
$self->meta->_initialize_object($self, \%args);
}
return $self;
}
1;
__END__
=head1 NAME
Mouse::Meta::Role::Method - A Mouse Method metaclass for Roles
=head1 VERSION
This document describes Mouse version v2.5.10
=head1 SEE ALSO
L<Moose::Meta::Role::Method>
=cut
| 15.882353 | 62 | 0.659259 |
edc7edda4ca503a38b2caf70667c830c82d83916 | 328 | pl | Perl | concat.pl | Fernal73/LearnPerl | b11b28c6b6f1e30d84755377f9928ff1f51c6857 | [
"MIT"
] | null | null | null | concat.pl | Fernal73/LearnPerl | b11b28c6b6f1e30d84755377f9928ff1f51c6857 | [
"MIT"
] | null | null | null | concat.pl | Fernal73/LearnPerl | b11b28c6b6f1e30d84755377f9928ff1f51c6857 | [
"MIT"
] | null | null | null | #!/usr/bin/env perl
use strict;
use warnings;
our $VERSION = '1.00';
my $resultstring = q{};
print "Enter your input - type an empty line to quit\n";
my $input = <>;
chomp $input;
while ( $input ne q{} ) {
$resultstring .= $input;
$input = <>;
chomp $input;
}
print "Here is the final string:\n";
print "$resultstring\n";
| 20.5 | 56 | 0.634146 |
eddbe1356d927c42fb494caca20a44ebae9cf893 | 1,374 | t | Perl | t/country-test-sm.t | gregoa/perl-modules-Number-Phone | dd0704e31ff78b949b2d715658ee9607f4eca24f | [
"Apache-2.0"
] | null | null | null | t/country-test-sm.t | gregoa/perl-modules-Number-Phone | dd0704e31ff78b949b2d715658ee9607f4eca24f | [
"Apache-2.0"
] | null | null | null | t/country-test-sm.t | gregoa/perl-modules-Number-Phone | dd0704e31ff78b949b2d715658ee9607f4eca24f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/perl -w
use strict;
use lib 't/inc';
use fatalwarnings;
use Number::Phone::Lib;
use Test::More;
END { done_testing(); }
{
my $np = Number::Phone::Lib->new('SM', '912345');
ok($np->is_fixed_line, '912345 is a fixed line without the 0549 prefix...');
ok(!$np->is_mobile, '...it is not a mobile...');
is($np->format, '+378 0549 912345', '...its international format is correct');
is($np->format_using('National'), '0549 912345', '...as is its national format');
}
{
my $np = Number::Phone::Lib->new('SM', '0549 912345');
ok($np->is_fixed_line, '0549 912345 is a fixed line without the 0549 prefix...');
ok(!$np->is_mobile, '...it is not a mobile...');
is($np->format, '+378 0549 912345', '...its international format is correct');
is($np->format_using('National'), '0549 912345', '...as is its national format');
}
{
my $np = Number::Phone::Lib->new('SM', '66661212');
ok($np->is_mobile, '044 81 1234 5678 is a mobile...');
ok(!$np->is_fixed_line, '...it is not a fixed line...');
is($np->format, '+378 66 66 12 12', '...its international format is correct');
is($np->format_using('National'), '66 66 12 12', '...as is its national format');
}
{
my $np = Number::Phone::Lib->new('SM', '0549 66661212');
ok(!defined $np, '0549 66661212 is a mobile with the 0549 prefix, which is not valid');
}
| 36.157895 | 91 | 0.612082 |
ed81acb5aec0211016957254d1271149d079c491 | 33,819 | pm | Perl | local/lib/perl5/DateTime/TimeZone/America/Juneau.pm | jkb78/extrajnm | 6890e38e15f85ea9c09a141aa14affad0b8e91e7 | [
"MIT"
] | null | null | null | local/lib/perl5/DateTime/TimeZone/America/Juneau.pm | jkb78/extrajnm | 6890e38e15f85ea9c09a141aa14affad0b8e91e7 | [
"MIT"
] | null | null | null | local/lib/perl5/DateTime/TimeZone/America/Juneau.pm | jkb78/extrajnm | 6890e38e15f85ea9c09a141aa14affad0b8e91e7 | [
"MIT"
] | null | null | null | # This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.07) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
#
# Generated from /tmp/Q713JNUf8G/northamerica. Olson data version 2016a
#
# Do not edit this file directly.
#
package DateTime::TimeZone::America::Juneau;
$DateTime::TimeZone::America::Juneau::VERSION = '1.95';
use strict;
use Class::Singleton 1.03;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;
@DateTime::TimeZone::America::Juneau::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );
my $spans =
[
[
DateTime::TimeZone::NEG_INFINITY, # utc_start
58910317061, # utc_end 1867-10-17 08:57:41 (Thu)
DateTime::TimeZone::NEG_INFINITY, # local_start
58910371200, # local_end 1867-10-18 00:00:00 (Fri)
54139,
0,
'LMT',
],
[
58910317061, # utc_start 1867-10-17 08:57:41 (Thu)
59946728261, # utc_end 1900-08-20 20:57:41 (Mon)
58910284800, # local_start 1867-10-17 00:00:00 (Thu)
59946696000, # local_end 1900-08-20 12:00:00 (Mon)
-32261,
0,
'LMT',
],
[
59946728261, # utc_start 1900-08-20 20:57:41 (Mon)
61252099200, # utc_end 1942-01-01 08:00:00 (Thu)
59946699461, # local_start 1900-08-20 12:57:41 (Mon)
61252070400, # local_end 1942-01-01 00:00:00 (Thu)
-28800,
0,
'PST',
],
[
61252099200, # utc_start 1942-01-01 08:00:00 (Thu)
61255476000, # utc_end 1942-02-09 10:00:00 (Mon)
61252070400, # local_start 1942-01-01 00:00:00 (Thu)
61255447200, # local_end 1942-02-09 02:00:00 (Mon)
-28800,
0,
'PST',
],
[
61255476000, # utc_start 1942-02-09 10:00:00 (Mon)
61366287600, # utc_end 1945-08-14 23:00:00 (Tue)
61255450800, # local_start 1942-02-09 03:00:00 (Mon)
61366262400, # local_end 1945-08-14 16:00:00 (Tue)
-25200,
1,
'PWT',
],
[
61366287600, # utc_start 1945-08-14 23:00:00 (Tue)
61370298000, # utc_end 1945-09-30 09:00:00 (Sun)
61366262400, # local_start 1945-08-14 16:00:00 (Tue)
61370272800, # local_end 1945-09-30 02:00:00 (Sun)
-25200,
1,
'PPT',
],
[
61370298000, # utc_start 1945-09-30 09:00:00 (Sun)
61378329600, # utc_end 1946-01-01 08:00:00 (Tue)
61370269200, # local_start 1945-09-30 01:00:00 (Sun)
61378300800, # local_end 1946-01-01 00:00:00 (Tue)
-28800,
0,
'PST',
],
[
61378329600, # utc_start 1946-01-01 08:00:00 (Tue)
62104176000, # utc_end 1969-01-01 08:00:00 (Wed)
61378300800, # local_start 1946-01-01 00:00:00 (Tue)
62104147200, # local_end 1969-01-01 00:00:00 (Wed)
-28800,
0,
'PST',
],
[
62104176000, # utc_start 1969-01-01 08:00:00 (Wed)
62114205600, # utc_end 1969-04-27 10:00:00 (Sun)
62104147200, # local_start 1969-01-01 00:00:00 (Wed)
62114176800, # local_end 1969-04-27 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62114205600, # utc_start 1969-04-27 10:00:00 (Sun)
62129926800, # utc_end 1969-10-26 09:00:00 (Sun)
62114180400, # local_start 1969-04-27 03:00:00 (Sun)
62129901600, # local_end 1969-10-26 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62129926800, # utc_start 1969-10-26 09:00:00 (Sun)
62145655200, # utc_end 1970-04-26 10:00:00 (Sun)
62129898000, # local_start 1969-10-26 01:00:00 (Sun)
62145626400, # local_end 1970-04-26 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62145655200, # utc_start 1970-04-26 10:00:00 (Sun)
62161376400, # utc_end 1970-10-25 09:00:00 (Sun)
62145630000, # local_start 1970-04-26 03:00:00 (Sun)
62161351200, # local_end 1970-10-25 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62161376400, # utc_start 1970-10-25 09:00:00 (Sun)
62177104800, # utc_end 1971-04-25 10:00:00 (Sun)
62161347600, # local_start 1970-10-25 01:00:00 (Sun)
62177076000, # local_end 1971-04-25 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62177104800, # utc_start 1971-04-25 10:00:00 (Sun)
62193430800, # utc_end 1971-10-31 09:00:00 (Sun)
62177079600, # local_start 1971-04-25 03:00:00 (Sun)
62193405600, # local_end 1971-10-31 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62193430800, # utc_start 1971-10-31 09:00:00 (Sun)
62209159200, # utc_end 1972-04-30 10:00:00 (Sun)
62193402000, # local_start 1971-10-31 01:00:00 (Sun)
62209130400, # local_end 1972-04-30 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62209159200, # utc_start 1972-04-30 10:00:00 (Sun)
62224880400, # utc_end 1972-10-29 09:00:00 (Sun)
62209134000, # local_start 1972-04-30 03:00:00 (Sun)
62224855200, # local_end 1972-10-29 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62224880400, # utc_start 1972-10-29 09:00:00 (Sun)
62240608800, # utc_end 1973-04-29 10:00:00 (Sun)
62224851600, # local_start 1972-10-29 01:00:00 (Sun)
62240580000, # local_end 1973-04-29 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62240608800, # utc_start 1973-04-29 10:00:00 (Sun)
62256330000, # utc_end 1973-10-28 09:00:00 (Sun)
62240583600, # local_start 1973-04-29 03:00:00 (Sun)
62256304800, # local_end 1973-10-28 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62256330000, # utc_start 1973-10-28 09:00:00 (Sun)
62262381600, # utc_end 1974-01-06 10:00:00 (Sun)
62256301200, # local_start 1973-10-28 01:00:00 (Sun)
62262352800, # local_end 1974-01-06 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62262381600, # utc_start 1974-01-06 10:00:00 (Sun)
62287779600, # utc_end 1974-10-27 09:00:00 (Sun)
62262356400, # local_start 1974-01-06 03:00:00 (Sun)
62287754400, # local_end 1974-10-27 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62287779600, # utc_start 1974-10-27 09:00:00 (Sun)
62298064800, # utc_end 1975-02-23 10:00:00 (Sun)
62287750800, # local_start 1974-10-27 01:00:00 (Sun)
62298036000, # local_end 1975-02-23 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62298064800, # utc_start 1975-02-23 10:00:00 (Sun)
62319229200, # utc_end 1975-10-26 09:00:00 (Sun)
62298039600, # local_start 1975-02-23 03:00:00 (Sun)
62319204000, # local_end 1975-10-26 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62319229200, # utc_start 1975-10-26 09:00:00 (Sun)
62334957600, # utc_end 1976-04-25 10:00:00 (Sun)
62319200400, # local_start 1975-10-26 01:00:00 (Sun)
62334928800, # local_end 1976-04-25 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62334957600, # utc_start 1976-04-25 10:00:00 (Sun)
62351283600, # utc_end 1976-10-31 09:00:00 (Sun)
62334932400, # local_start 1976-04-25 03:00:00 (Sun)
62351258400, # local_end 1976-10-31 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62351283600, # utc_start 1976-10-31 09:00:00 (Sun)
62366407200, # utc_end 1977-04-24 10:00:00 (Sun)
62351254800, # local_start 1976-10-31 01:00:00 (Sun)
62366378400, # local_end 1977-04-24 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62366407200, # utc_start 1977-04-24 10:00:00 (Sun)
62382733200, # utc_end 1977-10-30 09:00:00 (Sun)
62366382000, # local_start 1977-04-24 03:00:00 (Sun)
62382708000, # local_end 1977-10-30 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62382733200, # utc_start 1977-10-30 09:00:00 (Sun)
62398461600, # utc_end 1978-04-30 10:00:00 (Sun)
62382704400, # local_start 1977-10-30 01:00:00 (Sun)
62398432800, # local_end 1978-04-30 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62398461600, # utc_start 1978-04-30 10:00:00 (Sun)
62414182800, # utc_end 1978-10-29 09:00:00 (Sun)
62398436400, # local_start 1978-04-30 03:00:00 (Sun)
62414157600, # local_end 1978-10-29 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62414182800, # utc_start 1978-10-29 09:00:00 (Sun)
62429911200, # utc_end 1979-04-29 10:00:00 (Sun)
62414154000, # local_start 1978-10-29 01:00:00 (Sun)
62429882400, # local_end 1979-04-29 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62429911200, # utc_start 1979-04-29 10:00:00 (Sun)
62445632400, # utc_end 1979-10-28 09:00:00 (Sun)
62429886000, # local_start 1979-04-29 03:00:00 (Sun)
62445607200, # local_end 1979-10-28 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62445632400, # utc_start 1979-10-28 09:00:00 (Sun)
62461360800, # utc_end 1980-04-27 10:00:00 (Sun)
62445603600, # local_start 1979-10-28 01:00:00 (Sun)
62461332000, # local_end 1980-04-27 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62461360800, # utc_start 1980-04-27 10:00:00 (Sun)
62477085600, # utc_end 1980-10-26 10:00:00 (Sun)
62461332000, # local_start 1980-04-27 02:00:00 (Sun)
62477056800, # local_end 1980-10-26 02:00:00 (Sun)
-28800,
1,
'YDT',
],
[
62477085600, # utc_start 1980-10-26 10:00:00 (Sun)
62492810400, # utc_end 1981-04-26 10:00:00 (Sun)
62477056800, # local_start 1980-10-26 02:00:00 (Sun)
62492781600, # local_end 1981-04-26 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62492810400, # utc_start 1981-04-26 10:00:00 (Sun)
62508531600, # utc_end 1981-10-25 09:00:00 (Sun)
62492785200, # local_start 1981-04-26 03:00:00 (Sun)
62508506400, # local_end 1981-10-25 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62508531600, # utc_start 1981-10-25 09:00:00 (Sun)
62524260000, # utc_end 1982-04-25 10:00:00 (Sun)
62508502800, # local_start 1981-10-25 01:00:00 (Sun)
62524231200, # local_end 1982-04-25 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62524260000, # utc_start 1982-04-25 10:00:00 (Sun)
62540586000, # utc_end 1982-10-31 09:00:00 (Sun)
62524234800, # local_start 1982-04-25 03:00:00 (Sun)
62540560800, # local_end 1982-10-31 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62540586000, # utc_start 1982-10-31 09:00:00 (Sun)
62555709600, # utc_end 1983-04-24 10:00:00 (Sun)
62540557200, # local_start 1982-10-31 01:00:00 (Sun)
62555680800, # local_end 1983-04-24 02:00:00 (Sun)
-28800,
0,
'PST',
],
[
62555709600, # utc_start 1983-04-24 10:00:00 (Sun)
62572035600, # utc_end 1983-10-30 09:00:00 (Sun)
62555684400, # local_start 1983-04-24 03:00:00 (Sun)
62572010400, # local_end 1983-10-30 02:00:00 (Sun)
-25200,
1,
'PDT',
],
[
62572035600, # utc_start 1983-10-30 09:00:00 (Sun)
62574714000, # utc_end 1983-11-30 09:00:00 (Wed)
62572003200, # local_start 1983-10-30 00:00:00 (Sun)
62574681600, # local_end 1983-11-30 00:00:00 (Wed)
-32400,
0,
'YST',
],
[
62574714000, # utc_start 1983-11-30 09:00:00 (Wed)
62587767600, # utc_end 1984-04-29 11:00:00 (Sun)
62574681600, # local_start 1983-11-30 00:00:00 (Wed)
62587735200, # local_end 1984-04-29 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62587767600, # utc_start 1984-04-29 11:00:00 (Sun)
62603488800, # utc_end 1984-10-28 10:00:00 (Sun)
62587738800, # local_start 1984-04-29 03:00:00 (Sun)
62603460000, # local_end 1984-10-28 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
62603488800, # utc_start 1984-10-28 10:00:00 (Sun)
62619217200, # utc_end 1985-04-28 11:00:00 (Sun)
62603456400, # local_start 1984-10-28 01:00:00 (Sun)
62619184800, # local_end 1985-04-28 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62619217200, # utc_start 1985-04-28 11:00:00 (Sun)
62634938400, # utc_end 1985-10-27 10:00:00 (Sun)
62619188400, # local_start 1985-04-28 03:00:00 (Sun)
62634909600, # local_end 1985-10-27 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
62634938400, # utc_start 1985-10-27 10:00:00 (Sun)
62650666800, # utc_end 1986-04-27 11:00:00 (Sun)
62634906000, # local_start 1985-10-27 01:00:00 (Sun)
62650634400, # local_end 1986-04-27 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62650666800, # utc_start 1986-04-27 11:00:00 (Sun)
62666388000, # utc_end 1986-10-26 10:00:00 (Sun)
62650638000, # local_start 1986-04-27 03:00:00 (Sun)
62666359200, # local_end 1986-10-26 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
62666388000, # utc_start 1986-10-26 10:00:00 (Sun)
62680302000, # utc_end 1987-04-05 11:00:00 (Sun)
62666355600, # local_start 1986-10-26 01:00:00 (Sun)
62680269600, # local_end 1987-04-05 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62680302000, # utc_start 1987-04-05 11:00:00 (Sun)
62697837600, # utc_end 1987-10-25 10:00:00 (Sun)
62680273200, # local_start 1987-04-05 03:00:00 (Sun)
62697808800, # local_end 1987-10-25 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
62697837600, # utc_start 1987-10-25 10:00:00 (Sun)
62711751600, # utc_end 1988-04-03 11:00:00 (Sun)
62697805200, # local_start 1987-10-25 01:00:00 (Sun)
62711719200, # local_end 1988-04-03 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62711751600, # utc_start 1988-04-03 11:00:00 (Sun)
62729892000, # utc_end 1988-10-30 10:00:00 (Sun)
62711722800, # local_start 1988-04-03 03:00:00 (Sun)
62729863200, # local_end 1988-10-30 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
62729892000, # utc_start 1988-10-30 10:00:00 (Sun)
62743201200, # utc_end 1989-04-02 11:00:00 (Sun)
62729859600, # local_start 1988-10-30 01:00:00 (Sun)
62743168800, # local_end 1989-04-02 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62743201200, # utc_start 1989-04-02 11:00:00 (Sun)
62761341600, # utc_end 1989-10-29 10:00:00 (Sun)
62743172400, # local_start 1989-04-02 03:00:00 (Sun)
62761312800, # local_end 1989-10-29 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
62761341600, # utc_start 1989-10-29 10:00:00 (Sun)
62774650800, # utc_end 1990-04-01 11:00:00 (Sun)
62761309200, # local_start 1989-10-29 01:00:00 (Sun)
62774618400, # local_end 1990-04-01 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62774650800, # utc_start 1990-04-01 11:00:00 (Sun)
62792791200, # utc_end 1990-10-28 10:00:00 (Sun)
62774622000, # local_start 1990-04-01 03:00:00 (Sun)
62792762400, # local_end 1990-10-28 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
62792791200, # utc_start 1990-10-28 10:00:00 (Sun)
62806705200, # utc_end 1991-04-07 11:00:00 (Sun)
62792758800, # local_start 1990-10-28 01:00:00 (Sun)
62806672800, # local_end 1991-04-07 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62806705200, # utc_start 1991-04-07 11:00:00 (Sun)
62824240800, # utc_end 1991-10-27 10:00:00 (Sun)
62806676400, # local_start 1991-04-07 03:00:00 (Sun)
62824212000, # local_end 1991-10-27 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
62824240800, # utc_start 1991-10-27 10:00:00 (Sun)
62838154800, # utc_end 1992-04-05 11:00:00 (Sun)
62824208400, # local_start 1991-10-27 01:00:00 (Sun)
62838122400, # local_end 1992-04-05 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62838154800, # utc_start 1992-04-05 11:00:00 (Sun)
62855690400, # utc_end 1992-10-25 10:00:00 (Sun)
62838126000, # local_start 1992-04-05 03:00:00 (Sun)
62855661600, # local_end 1992-10-25 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
62855690400, # utc_start 1992-10-25 10:00:00 (Sun)
62869604400, # utc_end 1993-04-04 11:00:00 (Sun)
62855658000, # local_start 1992-10-25 01:00:00 (Sun)
62869572000, # local_end 1993-04-04 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62869604400, # utc_start 1993-04-04 11:00:00 (Sun)
62887744800, # utc_end 1993-10-31 10:00:00 (Sun)
62869575600, # local_start 1993-04-04 03:00:00 (Sun)
62887716000, # local_end 1993-10-31 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
62887744800, # utc_start 1993-10-31 10:00:00 (Sun)
62901054000, # utc_end 1994-04-03 11:00:00 (Sun)
62887712400, # local_start 1993-10-31 01:00:00 (Sun)
62901021600, # local_end 1994-04-03 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62901054000, # utc_start 1994-04-03 11:00:00 (Sun)
62919194400, # utc_end 1994-10-30 10:00:00 (Sun)
62901025200, # local_start 1994-04-03 03:00:00 (Sun)
62919165600, # local_end 1994-10-30 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
62919194400, # utc_start 1994-10-30 10:00:00 (Sun)
62932503600, # utc_end 1995-04-02 11:00:00 (Sun)
62919162000, # local_start 1994-10-30 01:00:00 (Sun)
62932471200, # local_end 1995-04-02 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62932503600, # utc_start 1995-04-02 11:00:00 (Sun)
62950644000, # utc_end 1995-10-29 10:00:00 (Sun)
62932474800, # local_start 1995-04-02 03:00:00 (Sun)
62950615200, # local_end 1995-10-29 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
62950644000, # utc_start 1995-10-29 10:00:00 (Sun)
62964558000, # utc_end 1996-04-07 11:00:00 (Sun)
62950611600, # local_start 1995-10-29 01:00:00 (Sun)
62964525600, # local_end 1996-04-07 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62964558000, # utc_start 1996-04-07 11:00:00 (Sun)
62982093600, # utc_end 1996-10-27 10:00:00 (Sun)
62964529200, # local_start 1996-04-07 03:00:00 (Sun)
62982064800, # local_end 1996-10-27 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
62982093600, # utc_start 1996-10-27 10:00:00 (Sun)
62996007600, # utc_end 1997-04-06 11:00:00 (Sun)
62982061200, # local_start 1996-10-27 01:00:00 (Sun)
62995975200, # local_end 1997-04-06 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
62996007600, # utc_start 1997-04-06 11:00:00 (Sun)
63013543200, # utc_end 1997-10-26 10:00:00 (Sun)
62995978800, # local_start 1997-04-06 03:00:00 (Sun)
63013514400, # local_end 1997-10-26 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63013543200, # utc_start 1997-10-26 10:00:00 (Sun)
63027457200, # utc_end 1998-04-05 11:00:00 (Sun)
63013510800, # local_start 1997-10-26 01:00:00 (Sun)
63027424800, # local_end 1998-04-05 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63027457200, # utc_start 1998-04-05 11:00:00 (Sun)
63044992800, # utc_end 1998-10-25 10:00:00 (Sun)
63027428400, # local_start 1998-04-05 03:00:00 (Sun)
63044964000, # local_end 1998-10-25 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63044992800, # utc_start 1998-10-25 10:00:00 (Sun)
63058906800, # utc_end 1999-04-04 11:00:00 (Sun)
63044960400, # local_start 1998-10-25 01:00:00 (Sun)
63058874400, # local_end 1999-04-04 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63058906800, # utc_start 1999-04-04 11:00:00 (Sun)
63077047200, # utc_end 1999-10-31 10:00:00 (Sun)
63058878000, # local_start 1999-04-04 03:00:00 (Sun)
63077018400, # local_end 1999-10-31 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63077047200, # utc_start 1999-10-31 10:00:00 (Sun)
63090356400, # utc_end 2000-04-02 11:00:00 (Sun)
63077014800, # local_start 1999-10-31 01:00:00 (Sun)
63090324000, # local_end 2000-04-02 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63090356400, # utc_start 2000-04-02 11:00:00 (Sun)
63108496800, # utc_end 2000-10-29 10:00:00 (Sun)
63090327600, # local_start 2000-04-02 03:00:00 (Sun)
63108468000, # local_end 2000-10-29 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63108496800, # utc_start 2000-10-29 10:00:00 (Sun)
63121806000, # utc_end 2001-04-01 11:00:00 (Sun)
63108464400, # local_start 2000-10-29 01:00:00 (Sun)
63121773600, # local_end 2001-04-01 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63121806000, # utc_start 2001-04-01 11:00:00 (Sun)
63139946400, # utc_end 2001-10-28 10:00:00 (Sun)
63121777200, # local_start 2001-04-01 03:00:00 (Sun)
63139917600, # local_end 2001-10-28 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63139946400, # utc_start 2001-10-28 10:00:00 (Sun)
63153860400, # utc_end 2002-04-07 11:00:00 (Sun)
63139914000, # local_start 2001-10-28 01:00:00 (Sun)
63153828000, # local_end 2002-04-07 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63153860400, # utc_start 2002-04-07 11:00:00 (Sun)
63171396000, # utc_end 2002-10-27 10:00:00 (Sun)
63153831600, # local_start 2002-04-07 03:00:00 (Sun)
63171367200, # local_end 2002-10-27 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63171396000, # utc_start 2002-10-27 10:00:00 (Sun)
63185310000, # utc_end 2003-04-06 11:00:00 (Sun)
63171363600, # local_start 2002-10-27 01:00:00 (Sun)
63185277600, # local_end 2003-04-06 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63185310000, # utc_start 2003-04-06 11:00:00 (Sun)
63202845600, # utc_end 2003-10-26 10:00:00 (Sun)
63185281200, # local_start 2003-04-06 03:00:00 (Sun)
63202816800, # local_end 2003-10-26 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63202845600, # utc_start 2003-10-26 10:00:00 (Sun)
63216759600, # utc_end 2004-04-04 11:00:00 (Sun)
63202813200, # local_start 2003-10-26 01:00:00 (Sun)
63216727200, # local_end 2004-04-04 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63216759600, # utc_start 2004-04-04 11:00:00 (Sun)
63234900000, # utc_end 2004-10-31 10:00:00 (Sun)
63216730800, # local_start 2004-04-04 03:00:00 (Sun)
63234871200, # local_end 2004-10-31 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63234900000, # utc_start 2004-10-31 10:00:00 (Sun)
63248209200, # utc_end 2005-04-03 11:00:00 (Sun)
63234867600, # local_start 2004-10-31 01:00:00 (Sun)
63248176800, # local_end 2005-04-03 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63248209200, # utc_start 2005-04-03 11:00:00 (Sun)
63266349600, # utc_end 2005-10-30 10:00:00 (Sun)
63248180400, # local_start 2005-04-03 03:00:00 (Sun)
63266320800, # local_end 2005-10-30 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63266349600, # utc_start 2005-10-30 10:00:00 (Sun)
63279658800, # utc_end 2006-04-02 11:00:00 (Sun)
63266317200, # local_start 2005-10-30 01:00:00 (Sun)
63279626400, # local_end 2006-04-02 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63279658800, # utc_start 2006-04-02 11:00:00 (Sun)
63297799200, # utc_end 2006-10-29 10:00:00 (Sun)
63279630000, # local_start 2006-04-02 03:00:00 (Sun)
63297770400, # local_end 2006-10-29 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63297799200, # utc_start 2006-10-29 10:00:00 (Sun)
63309294000, # utc_end 2007-03-11 11:00:00 (Sun)
63297766800, # local_start 2006-10-29 01:00:00 (Sun)
63309261600, # local_end 2007-03-11 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63309294000, # utc_start 2007-03-11 11:00:00 (Sun)
63329853600, # utc_end 2007-11-04 10:00:00 (Sun)
63309265200, # local_start 2007-03-11 03:00:00 (Sun)
63329824800, # local_end 2007-11-04 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63329853600, # utc_start 2007-11-04 10:00:00 (Sun)
63340743600, # utc_end 2008-03-09 11:00:00 (Sun)
63329821200, # local_start 2007-11-04 01:00:00 (Sun)
63340711200, # local_end 2008-03-09 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63340743600, # utc_start 2008-03-09 11:00:00 (Sun)
63361303200, # utc_end 2008-11-02 10:00:00 (Sun)
63340714800, # local_start 2008-03-09 03:00:00 (Sun)
63361274400, # local_end 2008-11-02 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63361303200, # utc_start 2008-11-02 10:00:00 (Sun)
63372193200, # utc_end 2009-03-08 11:00:00 (Sun)
63361270800, # local_start 2008-11-02 01:00:00 (Sun)
63372160800, # local_end 2009-03-08 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63372193200, # utc_start 2009-03-08 11:00:00 (Sun)
63392752800, # utc_end 2009-11-01 10:00:00 (Sun)
63372164400, # local_start 2009-03-08 03:00:00 (Sun)
63392724000, # local_end 2009-11-01 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63392752800, # utc_start 2009-11-01 10:00:00 (Sun)
63404247600, # utc_end 2010-03-14 11:00:00 (Sun)
63392720400, # local_start 2009-11-01 01:00:00 (Sun)
63404215200, # local_end 2010-03-14 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63404247600, # utc_start 2010-03-14 11:00:00 (Sun)
63424807200, # utc_end 2010-11-07 10:00:00 (Sun)
63404218800, # local_start 2010-03-14 03:00:00 (Sun)
63424778400, # local_end 2010-11-07 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63424807200, # utc_start 2010-11-07 10:00:00 (Sun)
63435697200, # utc_end 2011-03-13 11:00:00 (Sun)
63424774800, # local_start 2010-11-07 01:00:00 (Sun)
63435664800, # local_end 2011-03-13 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63435697200, # utc_start 2011-03-13 11:00:00 (Sun)
63456256800, # utc_end 2011-11-06 10:00:00 (Sun)
63435668400, # local_start 2011-03-13 03:00:00 (Sun)
63456228000, # local_end 2011-11-06 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63456256800, # utc_start 2011-11-06 10:00:00 (Sun)
63467146800, # utc_end 2012-03-11 11:00:00 (Sun)
63456224400, # local_start 2011-11-06 01:00:00 (Sun)
63467114400, # local_end 2012-03-11 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63467146800, # utc_start 2012-03-11 11:00:00 (Sun)
63487706400, # utc_end 2012-11-04 10:00:00 (Sun)
63467118000, # local_start 2012-03-11 03:00:00 (Sun)
63487677600, # local_end 2012-11-04 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63487706400, # utc_start 2012-11-04 10:00:00 (Sun)
63498596400, # utc_end 2013-03-10 11:00:00 (Sun)
63487674000, # local_start 2012-11-04 01:00:00 (Sun)
63498564000, # local_end 2013-03-10 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63498596400, # utc_start 2013-03-10 11:00:00 (Sun)
63519156000, # utc_end 2013-11-03 10:00:00 (Sun)
63498567600, # local_start 2013-03-10 03:00:00 (Sun)
63519127200, # local_end 2013-11-03 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63519156000, # utc_start 2013-11-03 10:00:00 (Sun)
63530046000, # utc_end 2014-03-09 11:00:00 (Sun)
63519123600, # local_start 2013-11-03 01:00:00 (Sun)
63530013600, # local_end 2014-03-09 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63530046000, # utc_start 2014-03-09 11:00:00 (Sun)
63550605600, # utc_end 2014-11-02 10:00:00 (Sun)
63530017200, # local_start 2014-03-09 03:00:00 (Sun)
63550576800, # local_end 2014-11-02 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63550605600, # utc_start 2014-11-02 10:00:00 (Sun)
63561495600, # utc_end 2015-03-08 11:00:00 (Sun)
63550573200, # local_start 2014-11-02 01:00:00 (Sun)
63561463200, # local_end 2015-03-08 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63561495600, # utc_start 2015-03-08 11:00:00 (Sun)
63582055200, # utc_end 2015-11-01 10:00:00 (Sun)
63561466800, # local_start 2015-03-08 03:00:00 (Sun)
63582026400, # local_end 2015-11-01 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63582055200, # utc_start 2015-11-01 10:00:00 (Sun)
63593550000, # utc_end 2016-03-13 11:00:00 (Sun)
63582022800, # local_start 2015-11-01 01:00:00 (Sun)
63593517600, # local_end 2016-03-13 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63593550000, # utc_start 2016-03-13 11:00:00 (Sun)
63614109600, # utc_end 2016-11-06 10:00:00 (Sun)
63593521200, # local_start 2016-03-13 03:00:00 (Sun)
63614080800, # local_end 2016-11-06 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63614109600, # utc_start 2016-11-06 10:00:00 (Sun)
63624999600, # utc_end 2017-03-12 11:00:00 (Sun)
63614077200, # local_start 2016-11-06 01:00:00 (Sun)
63624967200, # local_end 2017-03-12 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63624999600, # utc_start 2017-03-12 11:00:00 (Sun)
63645559200, # utc_end 2017-11-05 10:00:00 (Sun)
63624970800, # local_start 2017-03-12 03:00:00 (Sun)
63645530400, # local_end 2017-11-05 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63645559200, # utc_start 2017-11-05 10:00:00 (Sun)
63656449200, # utc_end 2018-03-11 11:00:00 (Sun)
63645526800, # local_start 2017-11-05 01:00:00 (Sun)
63656416800, # local_end 2018-03-11 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63656449200, # utc_start 2018-03-11 11:00:00 (Sun)
63677008800, # utc_end 2018-11-04 10:00:00 (Sun)
63656420400, # local_start 2018-03-11 03:00:00 (Sun)
63676980000, # local_end 2018-11-04 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63677008800, # utc_start 2018-11-04 10:00:00 (Sun)
63687898800, # utc_end 2019-03-10 11:00:00 (Sun)
63676976400, # local_start 2018-11-04 01:00:00 (Sun)
63687866400, # local_end 2019-03-10 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63687898800, # utc_start 2019-03-10 11:00:00 (Sun)
63708458400, # utc_end 2019-11-03 10:00:00 (Sun)
63687870000, # local_start 2019-03-10 03:00:00 (Sun)
63708429600, # local_end 2019-11-03 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63708458400, # utc_start 2019-11-03 10:00:00 (Sun)
63719348400, # utc_end 2020-03-08 11:00:00 (Sun)
63708426000, # local_start 2019-11-03 01:00:00 (Sun)
63719316000, # local_end 2020-03-08 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63719348400, # utc_start 2020-03-08 11:00:00 (Sun)
63739908000, # utc_end 2020-11-01 10:00:00 (Sun)
63719319600, # local_start 2020-03-08 03:00:00 (Sun)
63739879200, # local_end 2020-11-01 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63739908000, # utc_start 2020-11-01 10:00:00 (Sun)
63751402800, # utc_end 2021-03-14 11:00:00 (Sun)
63739875600, # local_start 2020-11-01 01:00:00 (Sun)
63751370400, # local_end 2021-03-14 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63751402800, # utc_start 2021-03-14 11:00:00 (Sun)
63771962400, # utc_end 2021-11-07 10:00:00 (Sun)
63751374000, # local_start 2021-03-14 03:00:00 (Sun)
63771933600, # local_end 2021-11-07 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63771962400, # utc_start 2021-11-07 10:00:00 (Sun)
63782852400, # utc_end 2022-03-13 11:00:00 (Sun)
63771930000, # local_start 2021-11-07 01:00:00 (Sun)
63782820000, # local_end 2022-03-13 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63782852400, # utc_start 2022-03-13 11:00:00 (Sun)
63803412000, # utc_end 2022-11-06 10:00:00 (Sun)
63782823600, # local_start 2022-03-13 03:00:00 (Sun)
63803383200, # local_end 2022-11-06 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63803412000, # utc_start 2022-11-06 10:00:00 (Sun)
63814302000, # utc_end 2023-03-12 11:00:00 (Sun)
63803379600, # local_start 2022-11-06 01:00:00 (Sun)
63814269600, # local_end 2023-03-12 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63814302000, # utc_start 2023-03-12 11:00:00 (Sun)
63834861600, # utc_end 2023-11-05 10:00:00 (Sun)
63814273200, # local_start 2023-03-12 03:00:00 (Sun)
63834832800, # local_end 2023-11-05 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63834861600, # utc_start 2023-11-05 10:00:00 (Sun)
63845751600, # utc_end 2024-03-10 11:00:00 (Sun)
63834829200, # local_start 2023-11-05 01:00:00 (Sun)
63845719200, # local_end 2024-03-10 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63845751600, # utc_start 2024-03-10 11:00:00 (Sun)
63866311200, # utc_end 2024-11-03 10:00:00 (Sun)
63845722800, # local_start 2024-03-10 03:00:00 (Sun)
63866282400, # local_end 2024-11-03 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63866311200, # utc_start 2024-11-03 10:00:00 (Sun)
63877201200, # utc_end 2025-03-09 11:00:00 (Sun)
63866278800, # local_start 2024-11-03 01:00:00 (Sun)
63877168800, # local_end 2025-03-09 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63877201200, # utc_start 2025-03-09 11:00:00 (Sun)
63897760800, # utc_end 2025-11-02 10:00:00 (Sun)
63877172400, # local_start 2025-03-09 03:00:00 (Sun)
63897732000, # local_end 2025-11-02 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63897760800, # utc_start 2025-11-02 10:00:00 (Sun)
63908650800, # utc_end 2026-03-08 11:00:00 (Sun)
63897728400, # local_start 2025-11-02 01:00:00 (Sun)
63908618400, # local_end 2026-03-08 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63908650800, # utc_start 2026-03-08 11:00:00 (Sun)
63929210400, # utc_end 2026-11-01 10:00:00 (Sun)
63908622000, # local_start 2026-03-08 03:00:00 (Sun)
63929181600, # local_end 2026-11-01 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
[
63929210400, # utc_start 2026-11-01 10:00:00 (Sun)
63940705200, # utc_end 2027-03-14 11:00:00 (Sun)
63929178000, # local_start 2026-11-01 01:00:00 (Sun)
63940672800, # local_end 2027-03-14 02:00:00 (Sun)
-32400,
0,
'AKST',
],
[
63940705200, # utc_start 2027-03-14 11:00:00 (Sun)
63961264800, # utc_end 2027-11-07 10:00:00 (Sun)
63940676400, # local_start 2027-03-14 03:00:00 (Sun)
63961236000, # local_end 2027-11-07 02:00:00 (Sun)
-28800,
1,
'AKDT',
],
];
sub olson_version {'2016a'}
sub has_dst_changes {61}
sub _max_year {2026}
sub _new_instance {
return shift->_init( @_, spans => $spans );
}
sub _last_offset { -32400 }
my $last_observance = bless( {
'format' => 'AK%sT',
'gmtoff' => '-9:00',
'local_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 724244,
'local_rd_secs' => 0,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 724244,
'utc_rd_secs' => 0,
'utc_year' => 1984
}, 'DateTime' ),
'offset_from_std' => 0,
'offset_from_utc' => -32400,
'until' => [],
'utc_start_datetime' => bless( {
'formatter' => undef,
'local_rd_days' => 724244,
'local_rd_secs' => 32400,
'offset_modifier' => 0,
'rd_nanosecs' => 0,
'tz' => bless( {
'name' => 'floating',
'offset' => 0
}, 'DateTime::TimeZone::Floating' ),
'utc_rd_days' => 724244,
'utc_rd_secs' => 32400,
'utc_year' => 1984
}, 'DateTime' )
}, 'DateTime::TimeZone::OlsonDB::Observance' )
;
sub _last_observance { $last_observance }
my $rules = [
bless( {
'at' => '2:00',
'from' => '2007',
'in' => 'Mar',
'letter' => 'D',
'name' => 'US',
'offset_from_std' => 3600,
'on' => 'Sun>=8',
'save' => '1:00',
'to' => 'max',
'type' => undef
}, 'DateTime::TimeZone::OlsonDB::Rule' ),
bless( {
'at' => '2:00',
'from' => '2007',
'in' => 'Nov',
'letter' => 'S',
'name' => 'US',
'offset_from_std' => 0,
'on' => 'Sun>=1',
'save' => '0',
'to' => 'max',
'type' => undef
}, 'DateTime::TimeZone::OlsonDB::Rule' )
]
;
sub _rules { $rules }
1;
| 27.076861 | 89 | 0.632426 |
ed0f3f5d733adebebe425e382af29c3dbf6c8395 | 3,237 | t | Perl | t/mojo/exception.t | dolmen/p5-Mojolicious | 039a827893225a4be3c722cc8674bc0bffbc3b43 | [
"Artistic-2.0"
] | null | null | null | t/mojo/exception.t | dolmen/p5-Mojolicious | 039a827893225a4be3c722cc8674bc0bffbc3b43 | [
"Artistic-2.0"
] | 1 | 2020-04-23T03:40:10.000Z | 2020-04-23T06:05:14.000Z | t/mojo/exception.t | dolmen/p5-Mojolicious | 039a827893225a4be3c722cc8674bc0bffbc3b43 | [
"Artistic-2.0"
] | 3 | 2020-04-17T10:01:35.000Z | 2021-12-04T13:15:39.000Z | use Mojo::Base -strict;
use Test::More;
use Mojo::Exception;
# Basics
my $e = Mojo::Exception->new;
is $e->message, 'Exception!', 'right message';
is "$e", 'Exception!', 'right message';
$e = Mojo::Exception->new('Test!');
is $e->message, 'Test!', 'right message';
is "$e", 'Test!', 'right message';
# Context information
eval {
# test
my $wrapper = sub { Mojo::Exception->throw('Works!') };
$wrapper->();
# test
};
$e = $@;
isa_ok $e, 'Mojo::Exception', 'right class';
is $e, 'Works!', 'right result';
like $e->frames->[0][1], qr/exception\.t/, 'right file';
is $e->lines_before->[0][0], 15, 'right number';
is $e->lines_before->[0][1], 'eval {', 'right line';
is $e->lines_before->[1][0], 16, 'right number';
ok !$e->lines_before->[1][1], 'empty line';
is $e->lines_before->[2][0], 17, 'right number';
is $e->lines_before->[2][1], ' # test', 'right line';
is $e->lines_before->[3][0], 18, 'right number';
ok !$e->lines_before->[3][1], 'empty line';
is $e->lines_before->[4][0], 19, 'right number';
is $e->lines_before->[4][1],
" my \$wrapper = sub { Mojo::Exception->throw('Works!') };", 'right line';
is $e->line->[0], 20, 'right number';
is $e->line->[1], " \$wrapper->();", 'right line';
is $e->lines_after->[0][0], 21, 'right number';
ok !$e->lines_after->[0][1], 'empty line';
is $e->lines_after->[1][0], 22, 'right number';
is $e->lines_after->[1][1], ' # test', 'right line';
is $e->lines_after->[2][0], 23, 'right number';
ok !$e->lines_after->[2][1], 'empty line';
is $e->lines_after->[3][0], 24, 'right number';
is $e->lines_after->[3][1], '};', 'right line';
is $e->lines_after->[4][0], 25, 'right number';
is $e->lines_after->[4][1], '$e = $@;', 'right line';
# Trace
sub wrapper2 { Mojo::Exception->new->trace(@_) }
sub wrapper1 { wrapper2(@_) }
like wrapper1()->frames->[0][3], qr/wrapper2/, 'right subroutine';
like wrapper1(0)->frames->[0][3], qr/trace/, 'right subroutine';
like wrapper1(1)->frames->[0][3], qr/wrapper2/, 'right subroutine';
like wrapper1(2)->frames->[0][3], qr/wrapper1/, 'right subroutine';
# Inspect
$e = Mojo::Exception->new("Whatever at @{[__FILE__]} line 3.");
is_deeply $e->lines_before, [], 'no lines';
is_deeply $e->line, [], 'no line';
is_deeply $e->lines_after, [], 'no lines';
$e->inspect;
is_deeply $e->lines_before->[-1], [2, ''], 'right line';
is_deeply $e->line, [3, 'use Test::More;'], 'right line';
is_deeply $e->lines_after->[0], [4, 'use Mojo::Exception;'], 'right line';
$e->message("Died at @{[__FILE__]} line 4.")->inspect;
is_deeply $e->lines_before->[-1], [3, 'use Test::More;'], 'right line';
is_deeply $e->line, [4, 'use Mojo::Exception;'], 'right line';
is_deeply $e->lines_after->[0], [5, ''], 'right line';
# Verbose
$e = Mojo::Exception->new('Test!')->verbose(1);
$e->lines_before([[3, 'foo();']])->line([4, 'die;'])
->lines_after([[5, 'bar();']]);
is $e, <<EOF, 'right result';
Test!
3: foo();
4: die;
5: bar();
EOF
$e->message("Works!\n")->lines_before([])->lines_after([]);
is $e, <<EOF, 'right result';
Works!
4: die;
EOF
done_testing();
| 35.184783 | 77 | 0.560704 |
ed3f4fcb943ca46ce1bae028aafa063e036465b1 | 14 | t | Perl | bin/ime/uim_anthy/.anthy/imported_words_default.d/gf-fuzoku-34.t | fossabot/dotfiles-36 | 338c4bb93242bffb5c0d7e7256ce38b49f419727 | [
"MIT"
] | 2 | 2018-08-14T03:04:07.000Z | 2019-05-15T10:50:48.000Z | bin/ime/uim_anthy/.anthy/imported_words_default.d/gf-fuzoku-34.t | fossabot/dotfiles-36 | 338c4bb93242bffb5c0d7e7256ce38b49f419727 | [
"MIT"
] | 10 | 2018-04-24T11:29:46.000Z | 2021-12-04T03:51:57.000Z | bin/ime/uim_anthy/.anthy/imported_words_default.d/gf-fuzoku-34.t | fossabot/dotfiles-36 | 338c4bb93242bffb5c0d7e7256ce38b49f419727 | [
"MIT"
] | 1 | 2021-12-04T03:49:24.000Z | 2021-12-04T03:49:24.000Z | りっとる #JS*20 ℓ
| 7 | 13 | 0.642857 |
edc9396f8c2975aff8566f2f2c012cb29b97acad | 882 | pm | Perl | vulntracker/perl/vulntracker/VulnDB/RDBO/Qualifier.pm | jonaustin/advisoryscan | ba452c155f0d478450e0c91de5ea00f404e98616 | [
"MIT"
] | null | null | null | vulntracker/perl/vulntracker/VulnDB/RDBO/Qualifier.pm | jonaustin/advisoryscan | ba452c155f0d478450e0c91de5ea00f404e98616 | [
"MIT"
] | null | null | null | vulntracker/perl/vulntracker/VulnDB/RDBO/Qualifier.pm | jonaustin/advisoryscan | ba452c155f0d478450e0c91de5ea00f404e98616 | [
"MIT"
] | 1 | 2018-12-06T12:50:52.000Z | 2018-12-06T12:50:52.000Z | package VulnDB::RDBO::Qualifier;
use strict;
use base qw(VulnDB::RDBO);
__PACKAGE__->meta->setup(
table => 'qualifier',
columns => [
id => { type => 'integer', not_null => 1 },
name => { type => 'varchar', default => '', length => 50, not_null => 1 },
desc => { type => 'varchar', length => 150 },
sql => { type => 'varchar', default => '', length => 150, not_null => 1 },
rose => { type => 'varchar', default => '', length => 50, not_null => 1 },
rose_sql => { type => 'varchar', default => '', length => 50, not_null => 1 },
],
primary_key_columns => [ 'id' ],
relationships => [
user_criteria => {
class => 'VulnDB::RDBO::UserCriteria',
column_map => { id => 'qualifier_id' },
type => 'one to many',
},
],
);
1;
| 28.451613 | 87 | 0.468254 |
ed4f1f6b08df4a366c8fd61b13e15e773a64ce95 | 3,807 | pl | Perl | CommonInstall/comp_delta_prereq_SLES151.pl | dsilakov/opa-ff | a0aa9b9f8b2e7ada4fc1645596a0731fb91d6b4d | [
"Intel"
] | 4 | 2018-05-26T14:03:22.000Z | 2019-05-14T09:56:19.000Z | CommonInstall/comp_delta_prereq_SLES151.pl | dsilakov/opa-ff | a0aa9b9f8b2e7ada4fc1645596a0731fb91d6b4d | [
"Intel"
] | 10 | 2018-07-24T14:21:58.000Z | 2019-10-30T18:07:00.000Z | CommonInstall/comp_delta_prereq_SLES151.pl | dsilakov/opa-ff | a0aa9b9f8b2e7ada4fc1645596a0731fb91d6b4d | [
"Intel"
] | 8 | 2018-07-29T18:21:10.000Z | 2020-01-07T18:10:19.000Z | #!/usr/bin/perl
## BEGIN_ICS_COPYRIGHT8 ****************************************
#
# Copyright (c) 2015-2020, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
## END_ICS_COPYRIGHT8 ****************************************
#
## [ICS VERSION STRING: unknown]
#use strict;
##use Term::ANSIColor;
##use Term::ANSIColor qw(:constants);
##use File::Basename;
##use Math::BigInt;
#
## ==========================================================================
#
#Installation Prequisites array for delta components
my @opa_stack_prereq = (
"bash",
"kmod",
"rdma-core",
"rdma-ndd",
"systemd",
"coreutils",
"grep",
"libosmcomp4",
"libibmad5",
"libibumad3",
"rdma-core-devel",
);
$comp_prereq_hash{'opa_stack_prereq'} = \@opa_stack_prereq;
my @intel_hfi_prereq = (
"glibc",
"libgcc_s1",
"bash",
"udev",
"libudev-devel",
"python-base",
"libedit0",
"libncurses6",
"libnuma1",
"irqbalance",
"libatomic1",
);
$comp_prereq_hash{'intel_hfi_prereq'} = \@intel_hfi_prereq;
my @mvapich2_gcc_hfi_prereq = (
"bash",
"glibc",
"libz1",
"mpi-selector",
);
$comp_prereq_hash{'mvapich2_gcc_hfi_prereq'} = \@mvapich2_gcc_hfi_prereq;
my @mvapich2_intel_hfi_prereq = (
"bash",
"mpi-selector",
);
$comp_prereq_hash{'mvapich2_intel_hfi_prereq'} = \@mvapich2_intel_hfi_prereq;
my @openmpi_gcc_hfi_prereq = (
"glibc",
"bash",
"pkg-config",
"libgcc_s1",
"libgfortran4",
"gcc-fortran",
"libgomp1",
"libibverbs1",
"libquadmath0",
"librdmacm1",
"libstdc++6",
"libz1",
"opensm-devel",
"mpi-selector",
);
$comp_prereq_hash{'openmpi_gcc_hfi_prereq'} = \@openmpi_gcc_hfi_prereq;
my @openmpi_intel_hfi_prereq = (
"bash",
"mpi-selector",
);
$comp_prereq_hash{'openmpi_intel_hfi_prereq'} = \@openmpi_intel_hfi_prereq;
my @mvapich2_prereq = (
"bash",
"libibverbs1",
"librdmacm1",
"glibc",
"libz1",
"mpi-selector",
);
$comp_prereq_hash{'mvapich2_prereq'} = \@mvapich2_prereq;
my @openmpi_prereq = (
"glibc",
"bash",
"libz1",
"pkg-config",
"libgcc_s1",
"libgfortran3",
"gcc-fortran",
"libgomp1",
"libibverbs1",
"libquadmath0",
"librdmacm1",
"libstdc++6",
"libz1",
"opensm-libs3",
"opensm-devel",
"mpi-selector",
);
$comp_prereq_hash{'openmpi_prereq'} = \@openmpi_prereq;
| 27.586957 | 80 | 0.669293 |
ed7cc5bd42fc2f65c5120cb046c3a9bdb62c63d4 | 1,104 | t | Perl | t/PR75-get-number-of-followers.t | tawAsh1/line-bot-sdk-perl | 88a017aadcc931151587d9d9e728fd1e373020a2 | [
"Artistic-2.0"
] | 81 | 2016-04-15T17:18:39.000Z | 2021-04-10T08:13:33.000Z | t/PR75-get-number-of-followers.t | tawAsh1/line-bot-sdk-perl | 88a017aadcc931151587d9d9e728fd1e373020a2 | [
"Artistic-2.0"
] | 95 | 2016-04-19T05:11:11.000Z | 2022-03-25T08:42:10.000Z | t/PR75-get-number-of-followers.t | tawAsh1/line-bot-sdk-perl | 88a017aadcc931151587d9d9e728fd1e373020a2 | [
"Artistic-2.0"
] | 39 | 2016-04-21T06:28:02.000Z | 2021-07-08T02:13:40.000Z | use strict;
use warnings;
use utf8;
use Test::More;
use lib 't/lib';
use t::Util;
use LINE::Bot::API;
use LINE::Bot::API::Builder::SendMessage;
use Carp ();
$SIG{__DIE__} = \&Carp::confess;
my $bot = LINE::Bot::API->new(
channel_secret => 'testsecret',
channel_access_token => 'ACCESS_TOKEN',
);
send_request {
my $res = $bot->get_number_of_followers({ date => "20200214" });
ok $res->is_success;
is $res->http_status, 200;
is $res->status, "ready";
is $res->followers, 42;
is $res->targetedReaches, 12345;
is $res->blocks, 4321;
} receive_request {
my %args = @_;
is $args{method}, 'GET';
is $args{url}, 'https://api.line.me/v2/bot/insight/followers?date=20200214',
my $has_header = 0;
my @headers = @{ $args{headers} };
while (my($key, $value) = splice @headers, 0, 2) {
$has_header++ if $key eq 'Authorization' && $value eq 'Bearer ACCESS_TOKEN';
}
is $has_header, 1;
+{
status => "ready",
followers => 42,
targetedReaches => 12345,
blocks => 4321,
};
};
done_testing;
| 23 | 84 | 0.589674 |
ed02bbe1afc66c764fa2269f972deaea4c98adb0 | 1,638 | pm | Perl | auto-lib/Azure/Devices/CreateOrUpdateDpsCertificate.pm | pplu/azure-sdk-perl | 26cbef2d926f571bc1617c26338c106856f95568 | [
"Apache-2.0"
] | null | null | null | auto-lib/Azure/Devices/CreateOrUpdateDpsCertificate.pm | pplu/azure-sdk-perl | 26cbef2d926f571bc1617c26338c106856f95568 | [
"Apache-2.0"
] | null | null | null | auto-lib/Azure/Devices/CreateOrUpdateDpsCertificate.pm | pplu/azure-sdk-perl | 26cbef2d926f571bc1617c26338c106856f95568 | [
"Apache-2.0"
] | 1 | 2021-04-08T15:26:39.000Z | 2021-04-08T15:26:39.000Z | package Azure::Devices::CreateOrUpdateDpsCertificate;
use Moose;
use MooseX::ClassAttribute;
has 'If_Match' => (is => 'ro', isa => 'Str',
traits => [ 'Azure::ParamInHeader', 'Azure::LocationInResponse' ], location => 'If-Match',
);
has 'api_version' => (is => 'ro', required => 1, isa => 'Str', default => '2018-01-22',
traits => [ 'Azure::ParamInQuery', 'Azure::LocationInResponse' ], location => 'api-version',
);
has 'certificateDescription' => (is => 'ro', required => 1, isa => 'Azure::Devices::CertificateBodyDescription',
traits => [ 'Azure::ParamInBody' ],
);
has 'certificateName' => (is => 'ro', required => 1, isa => 'Str',
traits => [ 'Azure::ParamInPath' ],
);
has 'provisioningServiceName' => (is => 'ro', required => 1, isa => 'Str',
traits => [ 'Azure::ParamInPath' ],
);
has 'resourceGroupName' => (is => 'ro', required => 1, isa => 'Str',
traits => [ 'Azure::ParamInPath' ],
);
has 'subscriptionId' => (is => 'ro', required => 1, isa => 'Str',
traits => [ 'Azure::ParamInPath' ],
);
class_has _api_uri => (is => 'ro', default => '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}/certificates/{certificateName}');
class_has _returns => (is => 'ro', isa => 'HashRef', default => sub { {
200 => 'Azure::Devices::CreateOrUpdateDpsCertificateResult',
default => 'Azure::Devices::CreateOrUpdateDpsCertificateResult',
} });
class_has _is_async => (is => 'ro', default => 0);
class_has _api_method => (is => 'ro', default => 'PUT');
1;
| 43.105263 | 224 | 0.615385 |
ed9df5755da87173b911d4961bd4587b007a9590 | 3,568 | t | Perl | src/z80asm/t/option_debug.t | UnivEngineer/z88dk | 9047beba62595b1d88991bc934da75c0e2030d07 | [
"ClArtistic"
] | 1 | 2022-03-08T11:55:58.000Z | 2022-03-08T11:55:58.000Z | src/z80asm/t/option_debug.t | litwr2/z88dk | 13e55c9be0dfe79684caf901b15ac3d4ae2dc0f0 | [
"ClArtistic"
] | 2 | 2022-03-20T22:17:35.000Z | 2022-03-24T16:10:00.000Z | src/z80asm/t/option_debug.t | litwr2/z88dk | 13e55c9be0dfe79684caf901b15ac3d4ae2dc0f0 | [
"ClArtistic"
] | null | null | null | #!/usr/bin/perl
# Z88DK Z80 Macro Assembler
#
# Copyright (C) Paulo Custodio, 2011-2022
# License: The Artistic License 2.0, http://www.perlfoundation.org/artistic_license_2_0
# Repository: https://github.com/z88dk/z88dk/
#
# Test -debug info in map file
use Modern::Perl;
use Test::More;
require './t/testlib.pl';
# only ASM code
spew("test1.asm", <<END);
public func
c_line 1, "test1.c"
func:
ld a, 1
ld b, 3
c_line 2
add a, b
ret
END
spew("test.asm", <<END);
extern func
main:
call func
ret
END
run("./z88dk-z80asm -b -m test.asm test1.asm");
check_text_file("test.map", <<'END');
main = $0000 ; addr, local, , test, , test.asm:3
func = $0004 ; addr, public, , test1, , test1.c:1
__head = $0000 ; const, public, def, , ,
__tail = $000A ; const, public, def, , ,
__size = $000A ; const, public, def, , ,
END
run("./z88dk-z80asm -b -debug test.asm test1.asm");
check_text_file("test.map", <<'END');
main = $0000 ; addr, local, , test, , test.asm:3
__ASM_LINE_3_test_2easm = $0000 ; addr, local, , test, , test.asm:3
__ASM_LINE_4_test_2easm = $0000 ; addr, local, , test, , test.asm:4
__ASM_LINE_5_test_2easm = $0003 ; addr, local, , test, , test.asm:5
__C_LINE_1_test1_2ec = $0004 ; addr, local, , test1, , test1.c:1
__C_LINE_2_test1_2ec = $0008 ; addr, local, , test1, , test1.c:2
func = $0004 ; addr, public, , test1, , test1.c:1
__head = $0000 ; const, public, def, , ,
__tail = $000A ; const, public, def, , ,
__size = $000A ; const, public, def, , ,
END
# ASM and C code
unlink_testfiles();
spew("test1.asm", <<END);
public _one
_one:
ld hl, 1
ret
END
spew("test.h", <<END);
// some
// random
extern int one();
// lines
END
spew("test.c", <<END);
#include "test.h"
int main() {
return one()+one();
}
END
run("zcc +zx -m -debug test.c test1.asm -o test.bin");
my $map = join("\n", grep {/test.c:|test.h:|test1.asm:/} split('\n', slurp("test.map")))."\n";
check_text($map, <<'END', "map file contents");
__C_LINE_0_test_2ec = $80CD ; addr, local, , test_c, , test.c:0
__C_LINE_1_test_2ec = $80CD ; addr, local, , test_c, , test.c:1
__C_LINE_0_test_2eh = $80CD ; addr, local, , test_c, , test.h:0
__C_LINE_3_test_2eh = $80CD ; addr, local, , test_c, , test.h:3
__C_LINE_5_test_2eh = $80CD ; addr, local, , test_c, , test.h:5
__C_LINE_2_test_2ec = $80CD ; addr, local, , test_c, , test.c:2
__C_LINE_2_test_2ec_3a_3amain_3a_3a0_3a_3a1 = $8286 ; addr, local, , test_c, code_compiler, test.c::main::0::1:2
__C_LINE_3_test_2ec_3a_3amain_3a_3a1_3a_3a2 = $8289 ; addr, local, , test_c, code_compiler, test.c::main::1::2:3
__ASM_LINE_2_test1_2easm = $80CD ; addr, local, , test1_asm, , test1.asm:2
__ASM_LINE_3_test1_2easm = $80CD ; addr, local, , test1_asm, , test1.asm:3
__ASM_LINE_4_test1_2easm = $80D0 ; addr, local, , test1_asm, , test1.asm:4
_main = $8286 ; addr, public, , test_c, code_compiler, test.c::main::0::1:2
__CDBINFO__F_3aG_24main_24_30_5f_30_24_30_28_7b_30_7dDF_2cSI_3aS_29_2cC_2c_30_2c_30_2c_30_2c_30_2c_30 = $0001 ; const, public, , test_c, code_compiler, test.c::main::1::2:3
_one = $80CD ; addr, public, , test1_asm, , test1.asm:2
END
unlink_testfiles();
done_testing();
| 33.980952 | 172 | 0.591087 |
edd5cf34fbb0500f40adb7b423f5f2afd8ec524f | 8,952 | pm | Perl | benchmarks/spec2k6bin/specint/perl_depends/lib/Date/Parse.pm | YangZhou1997/DynamicCache_v2 | 60bc1e01e0eaf88f6c8e959cb6316e20ac910ed2 | [
"BSD-3-Clause"
] | 430 | 2015-01-05T19:21:10.000Z | 2022-03-29T07:19:18.000Z | benchmarks/spec2k6bin/specint/perl_depends/lib/Date/Parse.pm | YangZhou1997/DynamicCache_v2 | 60bc1e01e0eaf88f6c8e959cb6316e20ac910ed2 | [
"BSD-3-Clause"
] | 9 | 2015-01-20T17:42:30.000Z | 2022-03-04T22:05:43.000Z | benchmarks/spec2k6bin/specint/perl_depends/lib/Date/Parse.pm | YangZhou1997/DynamicCache_v2 | 60bc1e01e0eaf88f6c8e959cb6316e20ac910ed2 | [
"BSD-3-Clause"
] | 41 | 2015-05-10T17:08:50.000Z | 2022-01-19T01:15:19.000Z | # Date::Parse $Id: //depot/TimeDate/lib/Date/Parse.pm#22 $
#
# Copyright (c) 1995 Graham Barr. All rights reserved. This program is free
# software; you can redistribute it and/or modify it under the same terms
# as Perl itself.
package Date::Parse;
require 5.000;
use strict;
use vars qw($VERSION @ISA @EXPORT);
use Time::Local;
use Carp;
use Time::Zone;
use Exporter;
@ISA = qw(Exporter);
@EXPORT = qw(&strtotime &str2time &strptime);
$VERSION = "2.27";
my %month = (
january => 0,
february => 1,
march => 2,
april => 3,
may => 4,
june => 5,
july => 6,
august => 7,
september => 8,
sept => 8,
october => 9,
november => 10,
december => 11,
);
my %day = (
sunday => 0,
monday => 1,
tuesday => 2,
tues => 2,
wednesday => 3,
wednes => 3,
thursday => 4,
thur => 4,
thurs => 4,
friday => 5,
saturday => 6,
);
my @suf = (qw(th st nd rd th th th th th th)) x 3;
@suf[11,12,13] = qw(th th th);
#Abbreviations
map { $month{substr($_,0,3)} = $month{$_} } keys %month;
map { $day{substr($_,0,3)} = $day{$_} } keys %day;
my $strptime = <<'ESQ';
my %month = map { lc $_ } %$mon_ref;
my $daypat = join("|", map { lc $_ } reverse sort keys %$day_ref);
my $monpat = join("|", reverse sort keys %month);
my $sufpat = join("|", reverse sort map { lc $_ } @$suf_ref);
my %ampm = (
'a' => 0, # AM
'p' => 12, # PM
);
my($AM, $PM) = (0,12);
sub {
my $dtstr = lc shift;
my $merid = 24;
my($year,$month,$day,$hh,$mm,$ss,$zone,$dst,$frac);
$zone = tz_offset(shift) if @_;
1 while $dtstr =~ s#\([^\(\)]*\)# #o;
$dtstr =~ s#(\A|\n|\Z)# #sog;
# ignore day names
$dtstr =~ s#([\d\w\s])[\.\,]\s#$1 #sog;
$dtstr =~ s/,/ /g;
$dtstr =~ s#($daypat)\s*(den\s)?# #o;
# Time: 12:00 or 12:00:00 with optional am/pm
return unless $dtstr =~ /\S/;
if ($dtstr =~ s/\s(\d{4})([-:]?)(\d\d?)\2(\d\d?)(?:[Tt ](\d\d?)(?:([-:]?)(\d\d?)(?:\6(\d\d?)(?:[.,](\d+))?)?)?)?(?=\D)/ /) {
($year,$month,$day,$hh,$mm,$ss,$frac) = ($1,$3-1,$4,$5,$7,$8,$9);
}
unless (defined $hh) {
if ($dtstr =~ s#[:\s](\d\d?):(\d\d?)(:(\d\d?)(?:\.\d+)?)?\s*(?:([ap])\.?m?\.?)?\s# #o) {
($hh,$mm,$ss) = ($1,$2,$4 || 0);
$merid = $ampm{$5} if $5;
}
# Time: 12 am
elsif ($dtstr =~ s#\s(\d\d?)\s*([ap])\.?m?\.?\s# #o) {
($hh,$mm,$ss) = ($1,0,0);
$merid = $ampm{$2};
}
}
if (defined $hh and $hh <= 12 and $dtstr =~ s# ([ap])\.?m?\.?\s# #o) {
$merid = $ampm{$1};
}
unless (defined $year) {
# Date: 12-June-96 (using - . or /)
if ($dtstr =~ s#\s(\d\d?)([\-\./])($monpat)(\2(\d\d+))?\s# #o) {
($month,$day) = ($month{$3},$1);
$year = $5 if $5;
}
# Date: 12-12-96 (using '-', '.' or '/' )
elsif ($dtstr =~ s#\s(\d+)([\-\./])(\d\d?)(\2(\d+))?\s# #o) {
($month,$day) = ($1 - 1,$3);
if ($5) {
$year = $5;
# Possible match for 1995-01-24 (short mainframe date format);
($year,$month,$day) = ($1, $3 - 1, $5) if $month > 12;
return if length($year) > 2 and $year < 1901;
}
}
elsif ($dtstr =~ s#\s(\d+)\s*($sufpat)?\s*($monpat)# #o) {
($month,$day) = ($month{$3},$1);
}
elsif ($dtstr =~ s#($monpat)\s*(\d+)\s*($sufpat)?\s# #o) {
($month,$day) = ($month{$1},$2);
}
# Date: 961212
elsif ($dtstr =~ s#\s(\d\d)(\d\d)(\d\d)\s# #o) {
($year,$month,$day) = ($1,$2-1,$3);
}
$year = $1 if !defined($year) and $dtstr =~ s#\s(\d{2}(\d{2})?)[\s\.,]# #o;
}
# Zone
$dst = 1 if $dtstr =~ s#\bdst\b##o;
if ($dtstr =~ s#\s"?([a-z]{3,4})(dst|\d+[a-z]*|_[a-z]+)?"?\s# #o) {
$dst = 1 if $2 and $2 eq 'dst';
$zone = tz_offset($1);
return unless defined $zone;
}
elsif ($dtstr =~ s#\s([a-z]{3,4})?([\-\+]?)-?(\d\d?):?(\d\d)?(00)?\s# #o) {
my $m = defined($4) ? "$2$4" : 0;
my $h = "$2$3";
$zone = defined($1) ? tz_offset($1) : 0;
return unless defined $zone;
$zone += 60 * ($m + (60 * $h));
}
if ($dtstr =~ /\S/) {
# now for some dumb dates
if ($dtstr =~ s/^\s*(ut?|z)\s*$//) {
$zone = 0;
}
elsif ($dtstr =~ s#\s([a-z]{3,4})?([\-\+]?)-?(\d\d?)(\d\d)?(00)?\s# #o) {
my $m = defined($4) ? "$2$4" : 0;
my $h = "$2$3";
$zone = defined($1) ? tz_offset($1) : 0;
return unless defined $zone;
$zone += 60 * ($m + (60 * $h));
}
return if $dtstr =~ /\S/o;
}
if (defined $hh) {
if ($hh == 12) {
$hh = 0 if $merid == $AM;
}
elsif ($merid == $PM) {
$hh += 12;
}
}
$year -= 1900 if defined $year && $year > 1900;
$zone += 3600 if defined $zone && $dst;
$ss += "0.$frac" if $frac;
return ($ss,$mm,$hh,$day,$month,$year,$zone);
}
ESQ
use vars qw($day_ref $mon_ref $suf_ref $obj);
sub gen_parser
{
local($day_ref,$mon_ref,$suf_ref,$obj) = @_;
if($obj)
{
my $obj_strptime = $strptime;
substr($obj_strptime,index($strptime,"sub")+6,0) = <<'ESQ';
shift; # package
ESQ
my $sub = eval "$obj_strptime" or die $@;
return $sub;
}
eval "$strptime" or die $@;
}
*strptime = gen_parser(\%day,\%month,\@suf);
sub str2time
{
my @t = strptime(@_);
return undef
unless @t;
my($ss,$mm,$hh,$day,$month,$year,$zone) = @t;
# CPU2006
#my @lt = localtime(time);
my @lt = gmtime(time);
$hh ||= 0;
$mm ||= 0;
$ss ||= 0;
my $frac = $ss - int($ss);
$ss = int $ss;
$month = $lt[4]
unless(defined $month);
$day = $lt[3]
unless(defined $day);
$year = ($month > $lt[4]) ? ($lt[5] - 1) : $lt[5]
unless(defined $year);
return undef
unless($month <= 11 && $day >= 1 && $day <= 31
&& $hh <= 23 && $mm <= 59 && $ss <= 59);
my $result;
if (defined $zone) {
$result = eval {
local $SIG{__DIE__} = sub {}; # Ick!
timegm($ss,$mm,$hh,$day,$month,$year);
};
return undef
if !defined $result
or $result == -1
&& join("",$ss,$mm,$hh,$day,$month,$year)
ne "595923311169";
$result -= $zone;
}
else {
$result = eval {
local $SIG{__DIE__} = sub {}; # Ick!
timelocal($ss,$mm,$hh,$day,$month,$year);
};
return undef
if !defined $result
or $result == -1
&& join("",$ss,$mm,$hh,$day,$month,$year)
# CPU2006
# ne join("",(localtime(-1))[0..5]);
ne join("",(gmtime(-1))[0..5]);
}
return $result + $frac;
}
1;
__END__
=head1 NAME
Date::Parse - Parse date strings into time values
=head1 SYNOPSIS
use Date::Parse;
$time = str2time($date);
($ss,$mm,$hh,$day,$month,$year,$zone) = strptime($date);
=head1 DESCRIPTION
C<Date::Parse> provides two routines for parsing date strings into time values.
=over 4
=item str2time(DATE [, ZONE])
C<str2time> parses C<DATE> and returns a unix time value, or undef upon failure.
C<ZONE>, if given, specifies the timezone to assume when parsing if the
date string does not specify a timezome.
=item strptime(DATE [, ZONE])
C<strptime> takes the same arguments as str2time but returns an array of
values C<($ss,$mm,$hh,$day,$month,$year,$zone)>. Elements are only defined
if they could be extracted from the date string. The C<$zone> element is
the timezone offset in seconds from GMT. An empty array is returned upon
failure.
=head1 MULTI-LANGUAGE SUPPORT
Date::Parse is capable of parsing dates in several languages, these are
English, French, German and Italian.
$lang = Date::Language->new('German');
$lang->str2time("25 Jun 1996 21:09:55 +0100");
=head1 EXAMPLE DATES
Below is a sample list of dates that are known to be parsable with Date::Parse
1995:01:24T09:08:17.1823213 ISO-8601
1995-01-24T09:08:17.1823213
Wed, 16 Jun 94 07:29:35 CST Comma and day name are optional
Thu, 13 Oct 94 10:13:13 -0700
Wed, 9 Nov 1994 09:50:32 -0500 (EST) Text in ()'s will be ignored.
21 dec 17:05 Will be parsed in the current time zone
21-dec 17:05
21/dec 17:05
21/dec/93 17:05
1999 10:02:18 "GMT"
16 Nov 94 22:28:20 PST
=head1 LIMITATION
Date::Parse uses Time::Local internally, so is limited to only parsing dates
which result in valid values for Time::Local::timelocal
=head1 BUGS
When both the month and the date are specified in the date as numbers
they are always parsed assuming that the month number comes before the
date. This is the usual format used in American dates.
The reason why it is like this and not dynamic is that it must be
deterministic. Several people have suggested using the current locale,
but this will not work as the date being parsed may not be in the format
of the current locale.
My plans to address this, which will be in a future release, is to allow
the programmer to state what order they want these values parsed in.
=head1 AUTHOR
Graham Barr <[email protected]>
=head1 COPYRIGHT
Copyright (c) 1995 Graham Barr. All rights reserved. This program is free
software; you can redistribute it and/or modify it under the same terms
as Perl itself.
=cut
# $Id: //depot/TimeDate/lib/Date/Parse.pm#22 $
| 23.3125 | 126 | 0.545353 |
ed6987d335f4ce7d1d3f880f05a70fb670b5a5ab | 2,949 | pm | Perl | lib/Devel/hdb/App/Action.pm | gitpan/Devel-hdb | 1fa37cb35666be40e08895e2752a4754ca747e01 | [
"Artistic-1.0"
] | null | null | null | lib/Devel/hdb/App/Action.pm | gitpan/Devel-hdb | 1fa37cb35666be40e08895e2752a4754ca747e01 | [
"Artistic-1.0"
] | null | null | null | lib/Devel/hdb/App/Action.pm | gitpan/Devel-hdb | 1fa37cb35666be40e08895e2752a4754ca747e01 | [
"Artistic-1.0"
] | null | null | null | package Devel::hdb::App::Action;
use strict;
use warnings;
use base 'Devel::hdb::App::Breakpoint';
sub response_url_base() { '/actions' }
__PACKAGE__->add_route('post', response_url_base(), 'set');
__PACKAGE__->add_route('get', qr{(/actions/\w+)$}, 'get');
__PACKAGE__->add_route('post', qr{(/actions/\w+)$}, 'change');
__PACKAGE__->add_route('delete', qr{(/actions/\w+)$}, 'delete');
__PACKAGE__->add_route('get', '/actions', 'get_all');
sub actionable_adder() { 'add_action' }
sub actionable_remover() { 'remove_action' }
sub actionable_type() { 'Devel::Chitin::Action' }
{
my(%my_actions);
sub storage { \%my_actions; }
}
1;
=pod
=head1 NAME
Devel::hdb::App::Action - Get and set line actions
=head1 DESCRIPTION
Line actions are perl code snippets run just before executable statements in
the debugged program. The return value is ignored. These code snippets are
run in the context of the debugged program, and can change the program's
state, including lexical variables.
=head2 Routes
=over 4
=item GET /actions
Get action information about a particular file and line number. Accepts
these parameters as filters to limit the returned breakpoint data:
filename File name
line Line number
code Perl code string
inactive True if the breakpoint is inactive
Returns 200 and a JSON-encoded array containing hashes with these keys:
filename => File name
lineno => Line number
code => Code to execute for this action
inactive => 1 (yes) or undef (no), whether this action
is disabled/inactive
href => URL string to uniquely identify this action
=item POST /actions
Create an action. Action details must appear in the body as JSON hash
with these keys:
filename File name
line Line number
code Action code to run before this line executes.
inactive Set to true to make the action inactive, false to
clear the setting.
It responds 200 with the same JSON-encoded hash as GET /actions.
Returns 403 if the line is not breakable.
Returns 404 if the filename is not loaded.
=item GET /actions/<id>
Return the same JSON-encoded hash as GET /breakpoints.
Returns 404 if there is no breakpoint with that id.
=item POST /actions/<id>
Change an action property. The body contains a JSON hash of which keys to
change, along with their new values. Returns 200 and the same JSON hash
as GET /actions, including the new values.
Returns 403 if the given property cannot be changed.
Returns 404 if there is no action with that id.
=item DELETE /actions/<id>
Delete the action with the given id. Returns 204 if successful.
Returns 404 if there is no action with that id.
=back
=head1 SEE ALSO
Devel::hdb
=head1 AUTHOR
Anthony Brummett <[email protected]>
=head1 COPYRIGHT
Copyright 2014, Anthony Brummett. This module is free software. It may
be used, redistributed and/or modified under the same terms as Perl itself.
| 27.055046 | 76 | 0.726687 |
ed27db7bde9a94f365aca09db38c3cbc8bf6533c | 6,073 | t | Perl | src/wxWidgets/distrib/msw/tmake/filelist.t | HumanGamer/Torsion-Blitz | 7e8bea9919870ed783206715803822b5f7fff00b | [
"MIT"
] | 38 | 2016-02-20T02:46:28.000Z | 2021-11-17T11:39:57.000Z | src/wxWidgets/distrib/msw/tmake/filelist.t | HumanGamer/Torsion | ac502a37dcd953a5bb41c800d8973f0188c9f9d6 | [
"MIT"
] | 17 | 2016-02-20T02:19:55.000Z | 2021-02-08T15:15:17.000Z | src/wxWidgets/distrib/msw/tmake/filelist.t | HumanGamer/Torsion | ac502a37dcd953a5bb41c800d8973f0188c9f9d6 | [
"MIT"
] | 46 | 2016-02-20T02:47:33.000Z | 2021-01-31T15:46:05.000Z | #!#############################################################################
#! File: filelist.t
#! Purpose: tmake template file containig Perl code to parse the filelist.txt
#! file - this is used by all other templates.
#! Author: Vadim Zeitlin
#! Created: 14.07.99
#! Version: $Id: filelist.t,v 1.26 2003/04/17 20:26:52 MBN Exp $
#!#############################################################################
#${
use lib './lib';
use wxFileInfo;
open(FILELIST, "filelist.txt") or die "Can't open filelist file: $!\n";
#! maps file types to array names, for example an entry of the form
#! FooH => 'wxXYZ' means that all files with type "FooH" will be
#! added to an array named @wxXYZ
my %type_2_array = (
Common => "wxCommon",
Generic => "wxGeneric",
GenericH => "wxGenericInclude",
HTML => "wxHtml",
HtmlH => "wxHtmlInclude",
Motif => "wxMotif",
MotifH => "wxMotifInclude",
ProtoH => "wxProtocolInclude",
Unix => "wxUnix",
UnixH => "wxUnixInclude",
WXH => "wxWxInclude",
);
line: while ( defined($_ = <FILELIST>) ) {
chomp;
#! comment or blank line, skip
next line if ( $_ eq "" or /^#/ );
#! if ( $verbose ) {
#! print STDERR "Processing line: '$_'\n";
#! }
my @fields = split /\t/;
#! first column is filename, second is type, third is flags
my ($filename, $filetype, $fileflags) = @fields;
if ( $#fields > 2 ) {
warn "Ignoring malformed line $_ in the filelist file.\n";
next line;
} elsif ( $#fields == 1 ) {
#! add an empty flags string
$fileflags = "";
}
if ( $verbose ) {
print STDERR "File $filename: type '$filetype', flags '$fileflags'\n";
}
#! save all information in @wxALL
my $fileinfo = new wxFileInfo( $filename, $filetype, $fileflags );
push @wxALL, $fileinfo;
#! this is a bit stupid but all templates are written using the old
#! single letter flags which became so unreadable that I decided to
#! replace them with more readable strings, but it was easier to do
#! the translation here instead of changing all *.t files
$fileflags =~ s/Base/B/;
$fileflags =~ s/NotWin32/16/;
$fileflags =~ s/Win32Only/32/;
$fileflags =~ s/Generic/G/;
$fileflags =~ s/OLE/O/;
$fileflags =~ s/Socket/S/;
$fileflags =~ s/NotMSW/U/;
$fileflags =~ s/NotOS2/P/;
$fileflags =~ s/LowLevel/L/;
$fileflags =~ s/Theme/T/;
if ( $filetype eq "Common" ) {
$wxCommon{$filename} = $fileflags;
} elsif ( $filetype eq "Generic" ) {
$wxGeneric{$filename} = $fileflags;
} elsif ( $filetype eq "MSW" ) {
$wxMSW{$filename} = $fileflags;
} elsif ( $filetype eq "Mac" ) {
$wxMAC{$filename} = $fileflags;
} elsif ( $filetype eq "Cocoa" ) {
$wxCOCOA{$filename} = $fileflags;
} elsif ( $filetype eq "Motif" ) {
$wxMOTIF{$filename} = $fileflags;
} elsif ( $filetype eq "GTK" ) {
$wxGTK{$filename} = $fileflags;
} elsif ( $filetype eq "Univ" ) {
$wxUNIV{$filename} = $fileflags;
} elsif ( $filetype eq "MGL" ) {
$wxMGL{$filename} = $fileflags;
} elsif ( $filetype eq "Micro" ) {
$wxMICRO{$filename} = $fileflags;
} elsif ( $filetype eq "OS2" ) {
$wxOS2PM{$filename} = $fileflags;
} elsif ( $filetype eq "X11" ) {
$wxX11{$filename} = $fileflags;
} elsif ( $filetype eq "HTML" ) {
$wxHTML{$filename} = $fileflags;
} elsif ( $filetype eq "Unix" ) {
$wxUNIX{$filename} = $fileflags;
} elsif ( $filetype eq "BaseOnly" ) {
$wxBase{$filename} = $fileflags;
} elsif ( $filetype eq "WXH" ) {
$wxWXINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "ProtoH" ) {
$wxPROTOCOLINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "HtmlH" ) {
$wxHTMLINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "MacH" ) {
$wxMACINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "CocoaH" ) {
$wxCOCOAINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "MotifH" ) {
$wxMOTIFINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "MSWH" && $fileflags =~ m/O/ ) {
$wxOLEINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "MSWH" ) {
$wxMSWINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "GTKH" ) {
$wxGTKINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "OS2H" ) {
$wxOS2PMINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "MGLH" ) {
$wxMGLINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "X11H" ) {
$wxX11INCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "UnivH" ) {
$wxUNIVINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "UnixH" ) {
$wxUNIXINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "GenericH" ) {
$wxGENERICINCLUDE{$filename} = $fileflags;
} elsif ( $filetype eq "MacR" ) {
$wxMACRESOURCE{$filename} = $fileflags;
} elsif ( $filetype eq "CocoaR" ) {
$wxCOCOARESOURCE{$filename} = $fileflags;
} else {
warn "Unknown file type $filetype for $filename, ignoring.\n";
next line;
}
}
close(FILELIST);
#$}
#! vim:sw=4:ts=4:list:et:ft=perl
| 40.218543 | 82 | 0.490532 |
edca2504b9ab1a89e0c2907991cc584a76bb9dfd | 67 | t | Perl | resources/nvim/skeleton/skeleton.pl.t | yudoufu/dotfiles | 3dcd215066b5802e81fe50a743479df30cc7df21 | [
"MIT"
] | 1 | 2020-11-02T03:11:08.000Z | 2020-11-02T03:11:08.000Z | resources/nvim/skeleton/skeleton.pl.t | yudoufu/dotfiles | 3dcd215066b5802e81fe50a743479df30cc7df21 | [
"MIT"
] | null | null | null | resources/nvim/skeleton/skeleton.pl.t | yudoufu/dotfiles | 3dcd215066b5802e81fe50a743479df30cc7df21 | [
"MIT"
] | 1 | 2021-04-01T02:06:12.000Z | 2021-04-01T02:06:12.000Z | use strict;
use warnings;
use utf8;
use Test::More;
done_testing;
| 9.571429 | 15 | 0.746269 |
eddc6abd50ee9be280be53c1ba3b2cca935b220a | 8,365 | pm | Perl | modules/Services/lib/CoGe/Services/Auth.pm | LyonsLab/coge | 1d9a8e84a8572809ee3260ede44290e14de3bdd1 | [
"BSD-2-Clause"
] | 37 | 2015-02-24T18:58:30.000Z | 2021-03-07T21:22:18.000Z | modules/Services/lib/CoGe/Services/Auth.pm | LyonsLab/coge | 1d9a8e84a8572809ee3260ede44290e14de3bdd1 | [
"BSD-2-Clause"
] | 12 | 2016-06-09T21:57:00.000Z | 2020-09-11T18:48:51.000Z | modules/Services/lib/CoGe/Services/Auth.pm | LyonsLab/coge | 1d9a8e84a8572809ee3260ede44290e14de3bdd1 | [
"BSD-2-Clause"
] | 19 | 2016-03-26T08:15:17.000Z | 2021-04-12T05:03:29.000Z | package CoGe::Services::Auth;
use Mojo::UserAgent;
use Data::Dumper;
use URI::Escape::JavaScript qw(unescape);
use JSON qw(decode_json);
use CoGe::Accessory::Web qw(get_defaults add_user parse_proxy_response jwt_decode_token);
use File::Spec::Functions qw(catfile);
###############################################################################
# Authentication
#
#
###############################################################################
sub init {
my $self = shift;
return unless $self;
# print STDERR Dumper $self->req, "\n";
my $username = $self->param('username');
my $token = $self->param('token');
my $token2 = $self->req->headers->header('x-iplant-de-jwt'); # mdb added 9/23/15 for DE
my $token3 = $self->req->headers->header('x-coge-jwt'); # mdb added 7/20/16 for private data requests from within CoGe
my $remote_ip = $ENV{REMOTE_ADDR}; #$self->req->env->{HTTP_X_FORWARDED_FOR};
# print STDERR "CoGe::Services::Auth::init" .
# " username=" . ($username ? $username : '') .
# " token=" . ($token ? $token : '') .
# " token2=" . ($token2 ? $token2 : '') .
# " token3=" . ($token3 ? $token3 : '') .
# " remote_ip=" . ($remote_ip ? $remote_ip : '') . "\n";
# Get config
my $conf = get_defaults();
unless (defined $conf) {
print STDERR "CoGe::Services::Auth::init: couldn't load config file\n";
return;
}
# Connect to DB
my $db = CoGeX->dbconnect($conf);
# if ($debug) { # enable ORM debugging if requested
# $db->storage->debugobj(new DBIxProfiler());
# $db->storage->debug(1);
# }
unless (defined $db) {
print STDERR "CoGe::Services::Auth::init: couldn't connect to database\n";
return;
}
# Get user from DB
my $user;
if ($username) {
$user = $db->resultset('User')->find( { user_name => $username } );
}
# Check for existing user session (cookie enabled browser only)
my $cookie = $self->cookie($conf->{COOKIE_NAME});
if ($cookie) {
my $session_id = unescape($cookie);
if ($session_id) {
#print STDERR "session_id: ", $session_id, "\n";
$session_id =~ s/session&//;
my $session = $db->resultset('UserSession')->find( { session => $session_id } );
if ($session) {# && $user && $session->user_id == $user->id) { # mdb changed 3/7/16 for hypnotoad
$user = $db->resultset('User')->find($session->user_id); # mdb added 3/7/16 for hypnotoad
# print STDERR "CoGe::Services::Auth::init using existing session for user '", $user->name, "'\n";
if ($user->is_admin) {
my $user_id = $self->cookie('user_id');
if ($user_id) {
my $u = $db->resultset('User')->find($user_id);
$user = $u if $u;
}
}
return ( $db, $user, $conf );
}
}
}
# Otherwise, try to validate user token
if ($token || $token2 || $token3) {
my ($uname, $fname, $lname, $email);
if ($token) { # Agave
($uname, $fname, $lname, $email) = validate_agave($username, $token);
}
elsif ($token2) { # DE JWT
my $de_public_key_path = catfile($conf->{RESOURCEDIR}, $conf->{DE_PUBLIC_KEY});
($uname, $fname, $lname, $email) = validate_jwt($token2, $de_public_key_path);
}
elsif ($token3) { # CoGe JWT
my $coge_secret_path = catfile($conf->{RESOURCEDIR}, $conf->{JWT_COGE_SECRET});
($uname, $fname, $lname, $email) = validate_jwt($token3, $coge_secret_path);
}
unless ($uname) {
print STDERR "CoGe::Services::Auth::init: token validation failed\n";
return ( $db, undef, $conf );
}
# Add new user to DB
if (!$user) {
print STDERR "CoGe::Services::Auth::init: adding user '", $uname, "'\n";
$user = add_user($db, $uname, $fname, $lname, $email);
}
return ( $db, $user, $conf );
}
# Return unauthenticated response if no token or existing session
# print STDERR "CoGe::Services::Auth::init finished with no authentication\n";
return ( $db, undef, $conf );
}
sub validate_jwt {
my $token = shift;
my $key_path = shift;
return unless $token;
# print STDERR "CoGe::Services::Auth::validate_jwt\n";
# Get path to JWT key
unless ($key_path && -r $key_path) {
print STDERR "CoGe::Services::Auth::init: missing JWT key file\n";
return;
}
# Decode token and get payload
my $claims = jwt_decode_token($token, $key_path);
unless ($claims) {
print STDERR "CoGe::Services::Auth::validate_jwt: JWT token decoding failed\n";
return;
}
my $uname = $claims->{'sub'};
my $fname = $claims->{'given_name'};
my $lname = $claims->{'family_name'};
my $email = $claims->{'email'};
# print STDERR "CoGe::Services::Auth::validate_jwt: success! ", ($uname ? $uname : ''), "\n";
return ($uname, $fname, $lname, $email);
}
sub validate_agave {
my ($username, $token) = @_;
return unless ($username and $token);
# print STDERR "CoGe::Services::Auth::validate_agave: username=$username token=$token\n";
my ($uname, $fname, $lname, $email);
# Note: Mojolicious requires IO::Socket::SSL 1.75, do "cpan upgrade IO::Socket::SSL"
my $ua = Mojo::UserAgent->new;
# CAS Proxy - mdb added 7/20/15 for DE # replaced by JWT method
# if ($token_type eq 'cas') {
# # Get URL for CAS
# my $CAS_URL = get_defaults()->{CAS_URL};
# unless ($CAS_URL) {
# print STDERR "CoGe::Services::Auth::validate: missing CAS_URL\n";
# return;
# }
#
# # Validate proxy ticket and get user credentials
# $this_url =~ s/\?.+$//; # remove query params
# my $url = $CAS_URL.'/proxyValidate?service='.$this_url.'&ticket='.$token;
# my $res = $ua->get($url)->res;
# print STDERR Dumper $res, "\n";
#
# ($uname, $fname, $lname, $email) = parse_proxy_response($res->{content}{asset}{content});
# unless ($uname) {
# print STDERR 'CoGe::Services::Auth::validate_agave: CAS failed to authenticate, message=',
# ' url=', $url, "\n";
# return;
# }
# }
# Agave API (default) ------------------------------------------------------
# Get URL for Agave User API endpoint
my $USER_API_URL = get_defaults()->{USER_API_URL};
unless ($USER_API_URL) {
print STDERR "CoGe::Services::Auth::validate_agave: missing USER_API_URL\n";
return;
}
# Validate token and get user credentials. We lookup the 'me' profile
# for the given token to verify that it belongs to given username.
# See http://developer.agaveapi.co/?shell#client-credentials
my $url = $USER_API_URL . '/me';
my $res = $ua->get($url, { Authorization => "Bearer $token" })->res;
unless ($res and $res->{message} eq 'OK') {
print STDERR 'CoGe::Services::Auth::validate: user agent error, message=',
($res ? $res->{message} : 'undef'),
' url=', $url, "\n";
print STDERR Dumper $res, "\n" if ($res);
return;
}
# Extract user information and verify that the given username owns the given token
#print STDERR Dumper $res->body, "\n";
my $authResponse = decode_json($res->body);
unless ($authResponse && $authResponse->{status} =~ /success/i &&
$authResponse->{result} && $authResponse->{result}->{username} eq $username)
{
print STDERR 'CoGe::Services::Auth::validate_agave: Agave failed to authenticate, message=',
($authResponse ? $authResponse->{message} : 'undef'),
' url=', $url, "\n";
print STDERR Dumper $authResponse, "\n" if ($authResponse);
return;
}
$uname = $authResponse->{result}->{username};
$fname = $authResponse->{result}->{firstName};
$lname = $authResponse->{result}->{lastName};
$email = $authResponse->{result}->{email};
# print STDERR "CoGe::Services::Auth::validate_agave: success! ", ($uname ? $uname : ''), "\n";
return ($uname, $fname, $lname, $email);
}
1;
| 38.37156 | 130 | 0.549791 |
edd1a76882bbbbdccd403de788bce03e9f96feef | 5,240 | t | Perl | integration/advent2013-day14.t | perl6/roast | 30e8226c1a0562b9364ee9ea2730763374d79a3d | [
"Artistic-2.0"
] | 99 | 2015-03-03T13:01:44.000Z | 2020-03-05T15:21:43.000Z | integration/advent2013-day14.t | perl6/roast | 30e8226c1a0562b9364ee9ea2730763374d79a3d | [
"Artistic-2.0"
] | 331 | 2015-02-17T15:26:22.000Z | 2020-03-16T18:29:49.000Z | integration/advent2013-day14.t | perl6/roast | 30e8226c1a0562b9364ee9ea2730763374d79a3d | [
"Artistic-2.0"
] | 136 | 2015-02-02T13:34:10.000Z | 2020-02-18T02:26:59.000Z | use v6;
use Test;
plan 10;
# Promises
{
my $p1000 = start {
(1..Inf).grep(*.is-prime)[999]
}
is $p1000.result, 7919, 'simple promise';
}
class CurrencyExchange {
has Int $.delay;
has Str $.id;
method get_quote($val) {
sleep( $.delay );
return $val * $.delay;
}
}
my @currency_exchanges = (CurrencyExchange.new( :id<fast>, :delay(1) ),
CurrencyExchange.new( :id<med>, :delay(3) ),
CurrencyExchange.new( :id<slow>, :delay(7) ), # wont finish in 5 sec
);
{
my $val = 42;
my @getting = @currency_exchanges.map(-> $ex { start { $ex.get_quote($val) } });
await Promise.anyof(Promise.allof(@getting), Promise.in(5));
my @quotes = @getting.grep(*.status == Kept).map(*.result);
is-deeply @quotes, [42, 42*3], 'quotes example';
}
{
my $p1000 = start {
(1..Inf).grep(*.is-prime)[999]
}
my $base16 = $p1000.then(sub ($res) {
$res.result.base(16)
});
my $pwrite = $base16.then(sub ($res) {
return 'p1000.txt';
});
is $base16.result, '1EEF', '.then chaining';
is $pwrite.result, 'p1000.txt', '.then chaining';
}
{
# Create the promise.
my $p = Promise.new;
# Take the "vow" object, used to keep/break it.
my $v = $p.vow;
# keep this promise
my $result = 42;
$v.keep($result);
is $p.status, 'Kept', 'kept promise';
}
{
# Create the promise.
my $p = Promise.new;
# Take the "vow" object, used to keep/break it.
my $v = $p.vow;
my $exception_or_message = 'broken promise';
$v.break($exception_or_message);
is $p.status, 'Broken', 'broken promise';
}
# Channels
#?rakudo.jvm skip 'hangs'
{
{
my @files = qw<config1.ini config2.ini>;
my %config = read_all(@files);
is %config<font><size>, '10', 'combined config (font/size)';
is %config<font><style>, 'italic', 'combined config (font/style)';
is %config<font><color>, 'red', 'combined config (font/color)';
is %config<line><style>, 'dashed', 'combined config (line/style)';
}
sub read_all(@files) {
my $read = Channel.new;
my $parsed = Channel.new;
read_worker(@files, $read);
parse_worker($read, $parsed);
my %all_config = await config_combiner($parsed);
$read.close; $parsed.close;
return %all_config;
}
sub read_worker(@files, $dest) {
# simulated slurp()
sub Slurp($name) {
my %files = (
'config1.ini' => q:to"END1",
[font]
size = 10
style = italic
[line]
style = dashed
END1
'config2.ini' => q:to"END2",
[font]
color = red
[line]
height = 0.5
END2
);
return %files{$name}
}
start {
for @files -> $file {
$dest.send( Slurp($file) );
}
$dest.close();
CATCH { diag 'read_worker failure:' ~ $_; $dest.fail($_) }
}
}
sub parse_worker($source, $dest) {
my grammar INIFile {
token TOP {
^
<entries>
<section>+
$
}
token section {
'[' ~ ']' <key> \n
<entries>
}
token entries {
[
| <entry> \n
| \n
]*
}
rule entry { <key> '=' <value> }
token key { \w+ }
token value { \N+ }
token ws { \h* }
}
my class INIFileActions {
method TOP($/) {
my %result;
%result<_> = $<entries>.ast;
for @<section> -> $sec {
%result{$sec<key>} = $sec<entries>.ast;
}
make %result;
}
method entries($/) {
my %entries;
for @<entry> -> $e {
%entries{$e<key>} = ~$e<value>;
}
make %entries;
}
}
start {
react {
whenever $source {
if INIFile.parse($_, :actions(INIFileActions)) -> $parsed {
$dest.send($parsed.ast);
}
else {
$dest.fail("Could not parse INI file");
last;
}
}
}
$dest.close();
CATCH { diag 'parse worker failure:' ~ $_; $dest.fail($_) }
}
}
sub config_combiner($source) {
my $p = Promise.new;
my $v = $p.vow;
start {
my %result;
react {
whenever $source {
for %^content.kv -> $sec, %kvs {
for %kvs.kv -> $k, $v {
%result{$sec}{$k} = $v;
}
}
}
}
$v.keep(%result);
CATCH { diag "combiner failure:" ~ $_; $v.break($_) }
}
return $p;
}
}
# vim: expandtab shiftwidth=4
| 24.036697 | 84 | 0.431679 |
eda1ee6e747603001bbdf383fb24a5659c9ca1bb | 3,676 | t | Perl | t/controller/source.t | zakame/metacpan-web | c23bc1cff46c057c7047d6c2ff520e942240b7ac | [
"Artistic-1.0"
] | null | null | null | t/controller/source.t | zakame/metacpan-web | c23bc1cff46c057c7047d6c2ff520e942240b7ac | [
"Artistic-1.0"
] | null | null | null | t/controller/source.t | zakame/metacpan-web | c23bc1cff46c057c7047d6c2ff520e942240b7ac | [
"Artistic-1.0"
] | null | null | null | use strict;
use warnings;
use Test::More;
use MetaCPAN::Web::Test;
test_psgi app, sub {
my $cb = shift;
ok( my $res = $cb->( GET '/pod/Moose' ), 'GET /pod/Moose' );
is( $res->code, 200, 'code 200' );
my $tx = tx($res);
ok( my $source = $tx->find_value('//a[text()="Source"]/@href'),
'contains link to Source' );
ok( $res = $cb->( GET $source ), "GET $source" );
ok( $res->code(200), 'code 200' );
is(
$res->header('Content-Type'),
'text/html; charset=utf-8',
'Content-type text/html; charset=utf-8'
);
test_cache_headers(
$res,
{
cache_control => 'max-age=3600',
surrogate_key =>
'SOURCE dist=MOOSE author=ETHER content_type=text/html content_type=text',
surrogate_control => 'max-age=31556952, stale-if-error=2592000',
}
);
ok( $res->content =~ /package Moose/, 'includes Moose package' );
{
# Check a URL that is the 'latest', e.g. no version num
my $uri = '/source/Moose';
ok( my $res = $cb->( GET $uri ), "GET $uri" );
is( $res->code, 200, 'code 200' );
test_cache_headers(
$res,
{
cache_control => 'max-age=3600',
surrogate_key =>
'SOURCE dist=MOOSE author=ETHER content_type=text/html content_type=text',
surrogate_control =>
'max-age=31556952, stale-if-error=2592000',
}
);
}
{
# Test the html produced once; test different filetypes below.
my $prefix = '/source/RJBS/Dist-Zilla-5.043';
my @tests = ( [ pl => "$prefix/bin/dzil" ], );
foreach my $test (@tests) {
my ( $type, $uri ) = @$test;
ok( my $res = $cb->( GET $uri ), "GET $uri" );
is( $res->code, 200, 'code 200' );
my $tx = tx($res);
like(
$tx->find_value(q{//div[@class="content"]/pre/code/@class}),
qr/\blanguage-perl\b/,
'has pre-block with expected syntax brush'
);
}
}
};
{
# Test filetype detection. This is based on file attributes so we don't
# need to do the API hits to test each type.
my @tests = (
[ perl => 'lib/Template/Manual.pod' ], # pod
[ perl => 'lib/Dist/Zilla.pm' ],
[ perl => 'Makefile.PL' ],
[ javascript => 'META.json' ],
[ javascript => 'script.js' ],
[ yaml => 'META.yml' ],
[ yaml => 'config.yaml' ],
[ c => 'foo.c' ],
[ c => 'bar.h' ],
[ c => 'baz.xs' ],
[ cpanchanges => 'Changes' ],
[ perl => { path => 'bin/dzil', mime => 'text/x-script.perl' } ],
# There wouldn't normally be a file with no path
# but that doesn't mean this shouldn't work.
[ perl => { mime => 'text/x-script.perl' } ],
[ plain => 'README' ],
);
foreach my $ft_test (@tests) {
my ( $filetype, $file ) = @$ft_test;
ref $file or $file = { path => $file };
is
MetaCPAN::Web::Controller::Source->detect_filetype($file),
$filetype,
"detected filetype '$filetype' for: " . join q{ }, %$file;
}
{
my @warnings;
local $SIG{__WARN__} = sub { push @warnings, $_[0] };
# Test no 'path' and no 'mime'.
is MetaCPAN::Web::Controller::Source->detect_filetype( {} ),
'plain', 'default to plain text';
is scalar(@warnings), 0, 'no warnings when path and mime are undef'
or diag explain \@warnings;
}
}
done_testing;
| 29.886179 | 94 | 0.492655 |
edc8d38357e40ac33af99cd03a2537ff37aa1695 | 13,556 | t | Perl | tests/api-hunt.t | emamirazavi/moloch | 0e2a1edc5878de0605016d25bac6b5590ddcb317 | [
"Apache-2.0"
] | 3 | 2015-01-20T15:20:05.000Z | 2019-01-18T16:20:46.000Z | tests/api-hunt.t | paulpc/moloch | dc28e0090d46e28cadc9b6861c98422ac74e376f | [
"Apache-2.0"
] | null | null | null | tests/api-hunt.t | paulpc/moloch | dc28e0090d46e28cadc9b6861c98422ac74e376f | [
"Apache-2.0"
] | null | null | null | use Test::More tests => 214;
use Cwd;
use URI::Escape;
use MolochTest;
use JSON;
use Test::Differences;
use Data::Dumper;
use strict;
my $token = getTokenCookie();
my $otherToken = getTokenCookie('user2');
my $json;
# Delete old hunts
esPost("/tests_hunts/hunt/_delete_by_query?conflicts=proceed&refresh", '{ "query": { "match_all": {} } }');
# Create huntuser
$json = viewerPostToken("/user/create", '{"userId": "huntuser", "userName": "UserName", "enabled":true, "password":"password", "packetSearch":true}', $token);
my $hToken = getTokenCookie('huntuser');
# Must have token to add a hunt
$json = viewerPost("/hunt", '{"hunt":{"totalSessions":1,"name":"test hunt","size":"50","search":"test search text","searchType":"ascii","type":"raw","src":true,"dst":true}}');
my $hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 0, "Can't add a job without a token");
# Must apply to sessions to add a hunt
$json = viewerPostToken("/hunt", '{"hunt":{"totalSessions":0,"name":"test hunt","size":"50","search":"test search text","searchType":"ascii","type":"raw","src":true,"dst":true}}', $token);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 0, "Can't add a job that doesn't apply to sessions");
# Must have a name to add a hunt
$json = viewerPostToken("/hunt", '{"hunt":{"totalSessions":1,"size":"50","search":"test search text","searchType":"ascii","type":"raw","src":true,"dst":true}}', $token);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 0, "Can't add a job without a name");
# Must have a size to add a hunt
$json = viewerPostToken("/hunt", '{"hunt":{"totalSessions":1,"name":"test hunt","search":"test search text","searchType":"ascii","type":"raw","src":true,"dst":true}}', $token);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 0, "Can't add a job without a size");
# Must have search text to add a hunt
$json = viewerPostToken("/hunt", '{"hunt":{"totalSessions":1,"name":"test hunt","size":"50","searchType":"ascii","type":"raw","src":true,"dst":true}}', $token);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 0, "Can't add a job without search text");
# Must have search text type to add a hunt
$json = viewerPostToken("/hunt", '{"hunt":{"totalSessions":1,"name":"test hunt","size":"50","search":"test search text","type":"raw","src":true,"dst":true}}', $token);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 0, "Can't add a job without search text type");
# Must have a valid search text type to add a hunt
$json = viewerPostToken("/hunt", '{"hunt":{"totalSessions":1,"name":"test hunt","size":"50","search":"test search text","searchType":"asdf","type":"raw","src":true,"dst":true}}', $token);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 0, "Can't add a job without search text type");
# Must have a type to add a hunt
$json = viewerPostToken("/hunt",'{"hunt":{"totalSessions":1,"name":"test hunt","size":"50","search":"test search text","searchType":"ascii","src":true,"dst":true}}', $token);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 0, "Can't add a job without a type");
# Must have a valid type to add a hunt
$json = viewerPostToken("/hunt",'{"hunt":{"totalSessions":1,"name":"test hunt","size":"50","search":"test search text","searchType":"ascii","type":"asdf","src":true,"dst":true}}', $token);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 0, "Can't add a job without a type");
# Must have src or dst to add a hunt
$json = viewerPostToken("/hunt", '{"hunt":{"totalSessions":1,"name":"test hunt","size":"50","search":"test search text","searchType":"ascii","type":"raw"}}', $token);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 0, "Can't add a job without a type");
# Must have query to add a hunt
$json = viewerPostToken("/hunt", '{"hunt":{"totalSessions":1,"name":"test hunt","size":"50","search":"test search text","searchType":"ascii","type":"raw","src":true,"dst":true}}', $token);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 0, "Can't add a job without a query");
# Must have fully formed query to add a hunt
$json = viewerPostToken("/hunt", '{"hunt":{"totalSessions":1,"name":"test hunt","size":"50","search":"test search text","searchType":"ascii","type":"raw","src":true,"dst":true,"query":{"startTime":18000}}}', $token);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 0, "Can't add a job without a query stopTime");
$json = viewerPostToken("/hunt", '{"hunt":{"totalSessions":1,"name":"test hunt","size":"50","search":"test search text","searchType":"ascii","type":"raw","src":true,"dst":true,"query":{"stopTime":1536872891}}}', $token);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 0, "Can't add a job without a query starTime");
# Add a valid hunt
$json = viewerPostToken("/hunt?molochRegressionUser=anonymous", '{"hunt":{"totalSessions":1,"name":"test hunt~`!@#$%^&*()[]{};<>?/`","size":"50","search":"test search text","searchType":"ascii","type":"raw","src":true,"dst":true,"query":{"startTime":18000,"stopTime":1536872891}}}', $token);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 1, "Add hunt 1");
# Make sure the hunt's name doesn't contain special chars
is ($json->{hunt}->{name}, "test hunt", "Strip special chars");
# If the user is not an admin they can only delete their own hunts
my $id1 = $json->{hunt}->{id};
$json = viewerDeleteToken("/hunt/$id1?molochRegressionUser=user2", $otherToken);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 1, "Non admin user cannot delete another user's hunt");
$json = viewerPostToken("/hunt?molochRegressionUser=user2", '{"hunt":{"totalSessions":1,"name":"test hunt","size":"50","search":"test search text","searchType":"ascii","type":"raw","src":true,"dst":true,"query":{"startTime":18000,"stopTime":1536872891}}}', $otherToken);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 2, "Add hunt 2");
my $id2 = $json->{hunt}->{id};
$json = viewerDeleteToken("/hunt/$id2?molochRegressionUser=user2", $otherToken);
$hunts = viewerGet("/hunt/list");
is (@{$hunts->{data}}, 1, "User can remove their own hunt");
# If the user is not an admin they can only pause their own hunts
$json = viewerPostToken("/hunt?molochRegressionUser=anonymous", '{"hunt":{"totalSessions":1,"name":"test hunt~`!@#$%^&*()[]{};<>?/`","size":"50","search":"test search text","searchType":"ascii","type":"raw","src":true,"dst":true,"query":{"startTime":18000,"stopTime":1536872891}}}', $token);
my $id3 = $json->{hunt}->{id};
$json = viewerPutToken("/hunt/$id3/pause?molochRegressionUser=user2", $otherToken);
is ($json->{text}, "You cannot change another user\'s hunt unless you have admin privileges", "Non admin user cannot pause another user's hunt");
# If the user is not an admin they can only play their own hunts
$json = viewerPutToken("/hunt/$id3/play?molochRegressionUser=user2", $otherToken);
is ($json->{text}, "You cannot change another user\'s hunt unless you have admin privileges", "Non admin user cannot pause another user's hunt");
# Admin can delete any hunt
$json = viewerPostToken("/hunt?molochRegressionUser=user2", '{"hunt":{"totalSessions":1,"name":"test hunt","size":"50","search":"test search text","searchType":"ascii","type":"raw","src":true,"dst":true,"query":{"startTime":18000,"stopTime":1536872891}}}', $otherToken);
my $id4 = $json->{hunt}->{id};
$json = viewerDeleteToken("/hunt/$id4?molochRegressionUser=anonymous", $token);
is (@{$hunts->{data}}, 1, "Admin can remove any hunt");
# multiget should return an error
my $mjson = multiGet("/hunt/list");
is ($mjson->{text}, "Not supported in multies", "Hunt not supported in multies");
## Now test hunts
my (%HUNTS, %RESULTS);
# Create 6 hunts based on the search type and search string
sub createHunts {
my ($stype, $str) = @_;
$HUNTS{"raw-$stype-both-$str"} = viewerPostToken("/hunt?molochRegressionUser=huntuser", '{"hunt":{"totalSessions":1,"name":"test-a","size":"50","search":"' . $str . '","searchType":"' . $stype . '","type":"raw","src":true,"dst":true,"query":{"startTime":18000,"stopTime":1536872891, "expression": "file == *http-wrapped-header.pcap"}}}', $hToken);
$HUNTS{"raw-$stype-src-$str"} = viewerPostToken("/hunt?molochRegressionUser=huntuser", '{"hunt":{"totalSessions":1,"name":"test-b","size":"50","search":"' . $str . '","searchType":"' . $stype . '","type":"raw","src":true,"dst":false,"query":{"startTime":18000,"stopTime":1536872891, "expression": "file == *http-wrapped-header.pcap"}}}', $hToken);
$HUNTS{"raw-$stype-dst-$str"} = viewerPostToken("/hunt?molochRegressionUser=huntuser", '{"hunt":{"totalSessions":1,"name":"test-c","size":"50","search":"' . $str . '","searchType":"' . $stype . '","type":"raw","src":false,"dst":true,"query":{"startTime":18000,"stopTime":1536872891, "expression": "file == *http-wrapped-header.pcap"}}}', $hToken);
$HUNTS{"reassembled-$stype-both-$str"} = viewerPostToken("/hunt?molochRegressionUser=huntuser", '{"hunt":{"totalSessions":1,"name":"test-d","size":"50","search":"' . $str . '","searchType":"' . $stype . '","type":"reassembled","src":true,"dst":true,"query":{"startTime":18000,"stopTime":1536872891, "expression": "file == *http-wrapped-header.pcap"}}}', $hToken);
$HUNTS{"reassembled-$stype-src-$str"} = viewerPostToken("/hunt?molochRegressionUser=huntuser", '{"hunt":{"totalSessions":1,"name":"test-e","size":"50","search":"' . $str . '","searchType":"' . $stype . '","type":"reassembled","src":true,"dst":false,"query":{"startTime":18000,"stopTime":1536872891, "expression": "file == *http-wrapped-header.pcap"}}}', $hToken);
$HUNTS{"reassembled-$stype-dst-$str"} = viewerPostToken("/hunt?molochRegressionUser=huntuser", '{"hunt":{"totalSessions":1,"name":"test-f","size":"50","search":"' . $str . '","searchType":"' . $stype . '","type":"reassembled","src":false,"dst":true,"query":{"startTime":18000,"stopTime":1536872891, "expression": "file == *http-wrapped-header.pcap"}}}', $hToken);
}
# Check hunt vars given name and what the match count should be
sub checkHunt {
my ($name, $match) = @_;
my $id = $HUNTS{$name}->{hunt}->{id};
my $result = $RESULTS{$id};
is ($result->{status}, 'finished', "$name finished check");
is ($result->{searchedSessions}, 1, "$name searchedSessions check");
is ($result->{totalSessions}, 1, "$name totalSessions check");
is ($result->{matchedSessions}, $match, "$name match check");
}
createHunts("ascii", "Get");
createHunts("ascii", "Gif");
createHunts("asciicase", "Get");
createHunts("asciicase", "Gif");
createHunts("asciicase", "GET");
createHunts("regex", "G..89");
createHunts("hex", "766d663d");
createHunts("hexregex", "766..63d");
# Actually process the hunts
viewerGet("/processHuntJobs");
# create hash of results
$hunts = viewerGet("/hunt/list?history=true");
foreach my $item (@{$hunts->{data}}) {
$RESULTS{$item->{id}} = $item;
}
# Check results
checkHunt("raw-ascii-both-Get", 1);
checkHunt("raw-ascii-src-Get", 1);
checkHunt("raw-ascii-dst-Get", 0);
checkHunt("reassembled-ascii-both-Get", 1);
checkHunt("reassembled-ascii-src-Get", 1);
checkHunt("reassembled-ascii-dst-Get", 0);
checkHunt("raw-ascii-both-Gif", 1);
checkHunt("raw-ascii-src-Gif", 0);
checkHunt("raw-ascii-dst-Gif", 1);
checkHunt("reassembled-ascii-both-Gif", 1);
checkHunt("reassembled-ascii-src-Gif", 0);
checkHunt("reassembled-ascii-dst-Gif", 1);
checkHunt("raw-asciicase-both-Get", 0);
checkHunt("raw-asciicase-src-Get", 0);
checkHunt("raw-asciicase-dst-Get", 0);
checkHunt("reassembled-asciicase-both-Get", 0);
checkHunt("reassembled-asciicase-src-Get", 0);
checkHunt("reassembled-asciicase-dst-Get", 0);
checkHunt("raw-asciicase-both-Gif", 0);
checkHunt("raw-asciicase-src-Gif", 0);
checkHunt("raw-asciicase-dst-Gif", 0);
checkHunt("reassembled-asciicase-both-Gif", 0);
checkHunt("reassembled-asciicase-src-Gif", 0);
checkHunt("reassembled-asciicase-dst-Gif", 0);
checkHunt("raw-asciicase-both-GET", 1);
checkHunt("raw-asciicase-src-GET", 1);
checkHunt("raw-asciicase-dst-GET", 0);
checkHunt("reassembled-asciicase-both-GET", 1);
checkHunt("reassembled-asciicase-src-GET", 1);
checkHunt("reassembled-asciicase-dst-GET", 0);
checkHunt("raw-regex-both-G..89", 1);
checkHunt("raw-regex-src-G..89", 0);
checkHunt("raw-regex-dst-G..89", 1);
checkHunt("reassembled-regex-both-G..89", 1);
checkHunt("reassembled-regex-src-G..89", 0);
checkHunt("reassembled-regex-dst-G..89", 1);
checkHunt("raw-hex-both-766d663d", 1);
checkHunt("raw-hex-src-766d663d", 1);
checkHunt("raw-hex-dst-766d663d", 0);
checkHunt("reassembled-hex-both-766d663d", 1);
checkHunt("reassembled-hex-src-766d663d", 1);
checkHunt("reassembled-hex-dst-766d663d", 0);
checkHunt("raw-hexregex-both-766..63d", 1);
checkHunt("raw-hexregex-src-766..63d", 1);
checkHunt("raw-hexregex-dst-766..63d", 0);
checkHunt("reassembled-hexregex-both-766..63d", 1);
checkHunt("reassembled-hexregex-src-766..63d", 1);
checkHunt("reassembled-hexregex-dst-766..63d", 0);
# cleanup
$json = viewerPostToken("/user/delete", "userId=huntuser", $token);
viewerDeleteToken("/hunt/$id1?molochRegressionUser=anonymous", $token);
viewerDeleteToken("/hunt/$id3?molochRegressionUser=anonymous", $token);
# esPost("/tests_hunts/hunt/_delete_by_query?conflicts=proceed&refresh", '{ "query": { "match_all": {} } }');
| 55.557377 | 367 | 0.651225 |
Subsets and Splits