Dataset schema (each record below lists these fields, followed by its content):

Column | Type | Values
---|---|---
hexsha | string | length 40
size | int64 | 3 to 1.05M
ext | string | 163 distinct values
lang | string | 53 distinct values
max_stars_repo_path | string | length 3 to 945
max_stars_repo_name | string | length 4 to 112
max_stars_repo_head_hexsha | string | length 40 to 78
max_stars_repo_licenses | sequence | length 1 to 10
max_stars_count | float64 (nullable) | 1 to 191k
max_stars_repo_stars_event_min_datetime | string (nullable) | length 24
max_stars_repo_stars_event_max_datetime | string (nullable) | length 24
max_issues_repo_path | string | length 3 to 945
max_issues_repo_name | string | length 4 to 113
max_issues_repo_head_hexsha | string | length 40 to 78
max_issues_repo_licenses | sequence | length 1 to 10
max_issues_count | float64 (nullable) | 1 to 116k
max_issues_repo_issues_event_min_datetime | string (nullable) | length 24
max_issues_repo_issues_event_max_datetime | string (nullable) | length 24
max_forks_repo_path | string | length 3 to 945
max_forks_repo_name | string | length 4 to 113
max_forks_repo_head_hexsha | string | length 40 to 78
max_forks_repo_licenses | sequence | length 1 to 10
max_forks_count | float64 (nullable) | 1 to 105k
max_forks_repo_forks_event_min_datetime | string (nullable) | length 24
max_forks_repo_forks_event_max_datetime | string (nullable) | length 24
content | string | length 3 to 1.05M
avg_line_length | float64 | 1 to 966k
max_line_length | int64 | 1 to 977k
alphanum_fraction | float64 | 0 to 1

hexsha: edb8e9193c9cfe47809390d9501642d04bd770b6 | size: 1,243 | ext: pl | lang: Perl
max_stars: data/processed/asia_0.3_0.5_150_input.pl | VincentDerk/Paper-AC-Decisions-Learning | abd1dc8893fbb11b43ebb49a25e26c0183bdba62 | ["Apache-2.0"] | count: null | dates: null
max_issues: data/processed/asia_0.3_0.5_150_input.pl | VincentDerk/Paper-AC-Decisions-Learning | abd1dc8893fbb11b43ebb49a25e26c0183bdba62 | ["Apache-2.0"] | count: null | dates: null
max_forks: data/processed/asia_0.3_0.5_150_input.pl | VincentDerk/Paper-AC-Decisions-Learning | abd1dc8893fbb11b43ebb49a25e26c0183bdba62 | ["Apache-2.0"] | count: null | dates: null
content:
0.01::asia.
0.5::smoke.
body_5(4,lung) :- smoke.
body_15(13,lung) :- \+smoke.
body_23(22,tub) :- asia.
body_33(31,tub) :- \+asia.
body_41(40,bronc) :- smoke.
body_51(49,bronc) :- \+smoke.
either :- lung, tub.
either :- lung, \+tub.
either :- \+lung, tub.
body_78(73,either) :- \+lung, \+tub.
body_86(85,xray) :- either.
body_96(94,xray) :- \+either.
body_106(103,dysp) :- bronc, either.
body_118(114,dysp) :- bronc, \+either.
body_129(125,dysp) :- \+bronc, either.
body_141(136,dysp) :- \+bronc, \+either.
query(smoke).
query(lung).
query(tub).
query(either).
query(bronc).
query(xray).
query(asia).
query(dysp).
utility(smoke,44).
utility(\+(smoke),t(V_0)) :- true.
utility(\+(tub),9).
utility(either,t(V_0)) :- true.
utility(\+(bronc),10).
utility(\+(xray),t(V_0)) :- true.
utility(asia,t(V_0)) :- true.
utility(dysp,-27).
0.1::lung :- body_5(4,lung).
0.01::lung :- body_15(13,lung).
0.05::tub :- body_23(22,tub).
0.01::tub :- body_33(31,tub).
0.6::bronc :- body_41(40,bronc).
0.3::bronc :- body_51(49,bronc).
0.0::either :- body_78(73,either).
0.98::xray :- body_86(85,xray).
0.05::xray :- body_96(94,xray).
0.9::dysp :- body_106(103,dysp).
0.8::dysp :- body_118(114,dysp).
0.7::dysp :- body_129(125,dysp).
0.1::dysp :- body_141(136,dysp).
avg_line_length: 25.895833 | max_line_length: 40 | alphanum_fraction: 0.634755

hexsha: 73d96f051ad23c7d82a43c9d8da2014a30c3a17b | size: 7,329 | ext: pl | lang: Perl
max_stars: sdk-6.5.16/tools/lmsc.pl | copslock/broadcom_cpri | 8e2767676e26faae270cf485591902a4c50cf0c5 | ["Spencer-94"] | count: null | dates: null
max_issues: sdk-6.5.16/tools/lmsc.pl | copslock/broadcom_cpri | 8e2767676e26faae270cf485591902a4c50cf0c5 | ["Spencer-94"] | count: null | dates: null
max_forks: sdk-6.5.16/tools/lmsc.pl | copslock/broadcom_cpri | 8e2767676e26faae270cf485591902a4c50cf0c5 | ["Spencer-94"] | count: null | dates: null
content:
#!/usr/bin/perl
#
#
# This license is set out in https://raw.githubusercontent.com/Broadcom-Network-Switching-Software/OpenBCM/master/Legal/LICENSE file.
#
# Copyright 2007-2019 Broadcom Inc. All rights reserved.
#
################################################################################
#
# lmsc.pl
#
# Linux Module Symbol Checker
#
# Look for all undefined or non-exported symbols in the given set of files.
#
sub help
{
print "Linux Module Symbol Checker\n\n";
    print "  Check for undefined symbols between a group of modules.\n";
print " Can be used to detect link errors which will occur at insmod time\n";
print " and verify the expected link dependencies between them.\n";
print " Any symbols which cannot be linked between the set of given files\n";
print " will be displayed.\n\n";
print "Usage: lmsc.pl [option=value]* <file0> <file1> <file2> ...\n";
print " lmsc.pl [option=value]* <file0>,<file1>,<file2> <file1,file10> ..\n";
print " Options:\n";
print " kernel=<filename> This image is included in all file sets.\n";
print " ferror=<num> Specify exit code when files cannot be found.\n";
print " Allows recovery in makefiles.\n";
exit(2);
}
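#
# Example invocations (illustrative only: the .ko names below are placeholders,
# while "gto-2_6" is one of the kernel aliases defined further down):
#
#   lmsc.pl kernel=gto-2_6 mod_a.ko mod_b.ko
#   lmsc.pl ferror=0 mod_a.ko,mod_b.ko mod_a.ko,mod_c.ko
#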
use File::Basename;
#
# File aliases. Mainly used to reference kernel images
#
my $KERN_PATH="/projects/ntsw-sw/linux/kernels";
my $KERN_BASE="$KERN_PATH/vmlinux";
my $KERN_TOOLS_PATH="/projects/ntsw-tools/linux/kernels";
my $KERN_TOOLS_BASE="$KERN_TOOLS_PATH/vmlinux";
my $KERN_REL=`uname -r`;
# Strip trailing whitespace
$KERN_REL =~ s/\s+$//;
my %ALIASES = (
"bmw-2_6", "$KERN_BASE.bmw-2.6wrs",
"gto-2_6", "$KERN_BASE.gto-2.6wrs",
"gto-2_6-wr30", "$KERN_BASE.gto-2.6wrs30",
"gto", "$KERN_BASE.gto",
"gtr-3_7", "$KERN_PATH/wrx/xlp316/vmlinux",
"gtx-2_6", "$KERN_BASE.gtx-2.6wrl",
"iproc-2_6", "$KERN_TOOLS_PATH/../iproc_ldks/ldk26/iproc/kernel/linux-custom/vmlinux",
"iproc-3_6", "$KERN_TOOLS_PATH/../iproc_ldks/ldk36/iproc/kernel/linux-custom/vmlinux",
"iproc-4_4", "$KERN_TOOLS_PATH/../iproc_ldks/xldk40/XLDK/kernel/linux/vmlinux",
"iproc", "$KERN_TOOLS_PATH/../iproc_ldks/iproc/XLDK32/kernel/linux/vmlinux",
"iproc_64", "$KERN_TOOLS_PATH/../iproc_ldks/iproc/XLDK64/kernel/linux/vmlinux",
"jag", "$KERN_BASE.jag",
"jag-2_6", "$KERN_BASE.jag-2.6wrs",
"keystone-2_6", "$KERN_BASE.key_be-26wrs",
"keystone_le-2_6", "$KERN_BASE.key_le-26wrs",
"nsx", "$KERN_TOOLS_BASE.nsx",
"nsx-2_6", "$KERN_BASE.nsx-2.6",
"nsx_wrl-2_6", "$KERN_BASE.nsx-2.6wrs",
"raptor", "$KERN_BASE.raptor",
"raptor-2_6", "$KERN_BASE.raptor-2.6wrs",
"wrx-3_7", "$KERN_PATH/wrx/xlp208/vmlinux",
"x86-generic-2_6", "/lib/modules/$KERN_REL/source/vmlinux",
"x86-generic_64-2_6", "/lib/modules/$KERN_REL/source/vmlinux",
"x86-smp_generic-2_6", "/lib/modules/$KERN_REL/source/vmlinux",
"x86-smp_generic_64-2_6", "/lib/modules/$KERN_REL/source/vmlinux",
);
#
# All of the sets of files which require dependency/link checking
#
my @FILESETS;
#
# All defined symbols, by file
my %DEFINED;
#
# All undefined symbols, by file
#
my %UNDEFINED;
#
# Simple command line options
#
my @FILES;
my $KERNEL;
my $FERROR = -1;
if(@ARGV==0) {
help();
}
foreach (@ARGV) {
if(/kernel=(.*)/) {
# Specifies a kernel name that should be prepended to all sets
$KERNEL = $1;
next;
}
if(/ferror=(.*)/) {
# Override exit code if file does not exist
$FERROR=$1;
next;
}
if(/^(--help|-h|--h|help)/) {
help();
}
push @FILES, $_;
}
#
# A single set of files can be specified as "file0 file1 file2..." on the command line.
# Multiple sets of files can be specified as "file0,file1,file2 file0,file2,file3..." on the command line.
#
if((grep { /,/ } @FILES) == 0) {
# Assume all arguments are in the same fileset
push @FILESETS, join(",", @FILES);
}
else {
# Each argument represents a separate fileset
push @FILESETS, @FILES;
}
#
# Add the kernel image if specified
#
if(defined($KERNEL)) {
map { $_ = "$KERNEL,$_"; } @FILESETS;
}
#
# Load all file symbols
#
load_all_symbols();
#
# Check all filesets and set exit code
#
exit(check_all());
################################################################################
#
# Check for filename aliases
#
sub alias
{
my $file = shift;
return defined($ALIASES{$file}) ? $ALIASES{$file} : $file;
}
#
# Check All Filesets
#
sub check_all
{
my $count;
foreach my $fs (@FILESETS) {
$count += check_fs(split /,/,$fs);
}
return $count == 0 ? 0 : -1;
}
#
# Check a single fileset
#
sub check_fs
{
my @files = @_;
my @defined;
# Get all defined symbols for this fileset
foreach (@files) {
push @defined, @{$DEFINED{alias($_)}};
}
# Check
my @pfiles = map { basename($_); } @files;
printf("Link Check: @pfiles: ");
my $count = 0;
foreach my $file (@files) {
foreach my $sym (@{$UNDEFINED{$file}}) {
if($sym =~ /^__crc_(.+)/) {
$sym = $1;
}
if((grep { $_ eq $sym } @defined) == 0) {
printf("\n *** %s: $sym", basename($file));
$count++;
}
}
}
if($count == 0) {
printf("OK");
}
printf("\n");
return $count;
}
#
# Load all defined and undefined symbols for a file
#
sub load_all_symbols
{
foreach my $fs (@FILESETS) {
foreach my $file (split /,/, $fs) {
#
# Check for aliases
#
$file = alias($file);
# Load the defined symbols if we haven't already
if(!defined($DEFINED{$file})) {
#
# File exist?
#
if(!(-e $file)) {
printf("Warning: Cannot find file '$file'. Link check not performed.\n");
exit($FERROR);
}
#
# Get all defined symbols
#
my @defined = nm($file, "--defined-only");
#
# If these are 2.6 modules, only the ksymtab symbols are actually available for linking.
#
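            # For example, a module that exports foo() lists a __ksymtab_foo
            # symbol; stripping the prefix leaves foo, which is what the
            # undefined-symbol lists of the other modules refer to.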
if(grep { /__ksymtab_/ } @defined) {
@defined = grep { s/__ksymtab_// } @defined;
}
$DEFINED{$file} = \@defined;
}
# Load the undefined symbols if we haven't already
if(!defined($UNDEFINED{$file})) {
my @undefined = nm($file, "--undefined-only");
$UNDEFINED{$file} = \@undefined;
}
}
}
}
#
# Use 'nm' to retrieve symbols
#
sub nm
{
my ($file, @flags) = @_;
my @symbols;
foreach my $nm_output (`/tools/bin/nm @flags $file`) {
my @lines = split(/\n/, $nm_output);
foreach(@lines) {
@str = split / / ;
$symbol = $str[$#str];
# Magic symbols
if($symbol =~ /(__start|__stop)___kallsyms/) {
next;
}
if($symbol =~ /__this_module/) {
next;
}
if($symbol =~ /__stack_chk_/) {
next;
}
if($symbol =~ /mcount/) {
next;
}
push @symbols, $symbol;
}
}
chomp @symbols;
return @symbols;
}
avg_line_length: 24.108553 | max_line_length: 133 | alphanum_fraction: 0.553418

hexsha: edbb8a7e8681071c77ed857767033e8679c137b0 | size: 2,076 | ext: pm | lang: Perl
max_stars: auto-lib/Paws/ElasticTranscoder/ReadJob.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | ["Apache-2.0"] | count: null | dates: null
max_issues: auto-lib/Paws/ElasticTranscoder/ReadJob.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | ["Apache-2.0"] | count: null | dates: null
max_forks: auto-lib/Paws/ElasticTranscoder/ReadJob.pm | galenhuntington/aws-sdk-perl | 13b775dcb5f0b3764f0a82f3679ed5c7721e67d3 | ["Apache-2.0"] | count: null | dates: null
content:
package Paws::ElasticTranscoder::ReadJob;
use Moose;
has Id => (is => 'ro', isa => 'Str', traits => ['ParamInURI'], uri_name => 'Id', required => 1);
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'ReadJob');
class_has _api_uri => (isa => 'Str', is => 'ro', default => '/2012-09-25/jobs/{Id}');
class_has _api_method => (isa => 'Str', is => 'ro', default => 'GET');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::ElasticTranscoder::ReadJobResponse');
1;
### main pod documentation begin ###
=head1 NAME
Paws::ElasticTranscoder::ReadJob - Arguments for method ReadJob on L<Paws::ElasticTranscoder>
=head1 DESCRIPTION
This class represents the parameters used for calling the method ReadJob on the
L<Amazon Elastic Transcoder|Paws::ElasticTranscoder> service. Use the attributes of this class
as arguments to method ReadJob.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to ReadJob.
=head1 SYNOPSIS
my $elastictranscoder = Paws->service('ElasticTranscoder');
my $ReadJobResponse = $elastictranscoder->ReadJob(
Id => 'MyId',
);
# Results:
my $Job = $ReadJobResponse->Job;
# Returns a L<Paws::ElasticTranscoder::ReadJobResponse> object.
Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instantiate the underlying object.
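For example, a hypothetical call mixing both kinds of attributes could look
like this (C<SomeMethod> and C<SomeComplexAttribute> are illustrative and not
part of this class, which only takes the scalar C<Id> shown above):
  $service->SomeMethod(
    Id                   => 'MyId',               # native type, passed as a scalar
    SomeComplexAttribute => { Key => 'Value' },   # complex type, passed as a HashRef
  );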
For the AWS API documentation, see L<https://aws.amazon.com/documentation/>
=head1 ATTRIBUTES
=head2 B<REQUIRED> Id => Str
The identifier of the job for which you want to get detailed
information.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method ReadJob in L<Paws::ElasticTranscoder>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
avg_line_length: 30.985075 | max_line_length: 249 | alphanum_fraction: 0.712428

hexsha: eda82759666a9f31f65707b30a607d3e9187d676 | size: 2,340 | ext: pm | lang: Perl
max_stars: auto-lib/Paws/WAF/GetSizeConstraintSet.pm | shogo82148/aws-sdk-perl | a87555a9d30dd1415235ebacd2715b2f7e5163c7 | ["Apache-2.0"] | count: null | dates: null
max_issues: auto-lib/Paws/WAF/GetSizeConstraintSet.pm | shogo82148/aws-sdk-perl | a87555a9d30dd1415235ebacd2715b2f7e5163c7 | ["Apache-2.0"] | count: 1 | dates: 2021-05-26T19:13:58.000Z to 2021-05-26T19:13:58.000Z
max_forks: auto-lib/Paws/WAF/GetSizeConstraintSet.pm | shogo82148/aws-sdk-perl | a87555a9d30dd1415235ebacd2715b2f7e5163c7 | ["Apache-2.0"] | count: null | dates: null
content:
package Paws::WAF::GetSizeConstraintSet;
use Moose;
has SizeConstraintSetId => (is => 'ro', isa => 'Str', required => 1);
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'GetSizeConstraintSet');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::WAF::GetSizeConstraintSetResponse');
class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::WAF::GetSizeConstraintSet - Arguments for method GetSizeConstraintSet on L<Paws::WAF>
=head1 DESCRIPTION
This class represents the parameters used for calling the method GetSizeConstraintSet on the
L<AWS WAF|Paws::WAF> service. Use the attributes of this class
as arguments to method GetSizeConstraintSet.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to GetSizeConstraintSet.
=head1 SYNOPSIS
my $waf = Paws->service('WAF');
# To get a size constraint set
# The following example returns the details of a size constraint match set
# with the ID example1ds3t-46da-4fdb-b8d5-abc321j569j5.
my $GetSizeConstraintSetResponse = $waf->GetSizeConstraintSet(
'SizeConstraintSetId' => 'example1ds3t-46da-4fdb-b8d5-abc321j569j5' );
# Results:
my $SizeConstraintSet = $GetSizeConstraintSetResponse->SizeConstraintSet;
# Returns a L<Paws::WAF::GetSizeConstraintSetResponse> object.
Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instantiate the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/waf/GetSizeConstraintSet>
=head1 ATTRIBUTES
=head2 B<REQUIRED> SizeConstraintSetId => Str
The C<SizeConstraintSetId> of the SizeConstraintSet that you want to
get. C<SizeConstraintSetId> is returned by CreateSizeConstraintSet and
by ListSizeConstraintSets.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method GetSizeConstraintSet in L<Paws::WAF>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
avg_line_length: 34.411765 | max_line_length: 249 | alphanum_fraction: 0.749573

hexsha: ed6208eb9ec9237495617006929098d057451582 | size: 2,514 | ext: pm | lang: Perl
max_stars: lib/Google/Ads/AdWords/v201809/ImageError.pm | googleads/googleads-perl-lib | 69e66d7e46fbd8ad901581b108ea6c14212701cf | ["Apache-2.0"] | count: 4 | dates: 2015-04-23T01:59:40.000Z to 2021-10-12T23:14:36.000Z
max_issues: lib/Google/Ads/AdWords/v201809/ImageError.pm | googleads/googleads-perl-lib | 69e66d7e46fbd8ad901581b108ea6c14212701cf | ["Apache-2.0"] | count: 23 | dates: 2015-02-19T17:03:58.000Z to 2019-07-01T10:15:46.000Z
max_forks: lib/Google/Ads/AdWords/v201809/ImageError.pm | googleads/googleads-perl-lib | 69e66d7e46fbd8ad901581b108ea6c14212701cf | ["Apache-2.0"] | count: 10 | dates: 2015-08-03T07:51:58.000Z to 2020-09-26T16:17:46.000Z
content:
package Google::Ads::AdWords::v201809::ImageError;
use strict;
use warnings;
__PACKAGE__->_set_element_form_qualified(1);
sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201809' };
our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;
sub __get_attr_class {
return $XML_ATTRIBUTE_CLASS;
}
use base qw(Google::Ads::AdWords::v201809::ApiError);
# Variety: sequence
use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);
{ # BLOCK to scope variables
my %fieldPath_of :ATTR(:get<fieldPath>);
my %fieldPathElements_of :ATTR(:get<fieldPathElements>);
my %trigger_of :ATTR(:get<trigger>);
my %errorString_of :ATTR(:get<errorString>);
my %ApiError__Type_of :ATTR(:get<ApiError__Type>);
my %reason_of :ATTR(:get<reason>);
__PACKAGE__->_factory(
[ qw( fieldPath
fieldPathElements
trigger
errorString
ApiError__Type
reason
) ],
{
'fieldPath' => \%fieldPath_of,
'fieldPathElements' => \%fieldPathElements_of,
'trigger' => \%trigger_of,
'errorString' => \%errorString_of,
'ApiError__Type' => \%ApiError__Type_of,
'reason' => \%reason_of,
},
{
'fieldPath' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
'fieldPathElements' => 'Google::Ads::AdWords::v201809::FieldPathElement',
'trigger' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
'errorString' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
'ApiError__Type' => 'SOAP::WSDL::XSD::Typelib::Builtin::string',
'reason' => 'Google::Ads::AdWords::v201809::ImageError::Reason',
},
{
'fieldPath' => 'fieldPath',
'fieldPathElements' => 'fieldPathElements',
'trigger' => 'trigger',
'errorString' => 'errorString',
'ApiError__Type' => 'ApiError.Type',
'reason' => 'reason',
}
);
} # end BLOCK
1;
=pod
=head1 NAME
Google::Ads::AdWords::v201809::ImageError
=head1 DESCRIPTION
Perl data type class for the XML Schema defined complexType
ImageError from the namespace https://adwords.google.com/api/adwords/cm/v201809.
Error class for errors associated with parsing image data.
=head2 PROPERTIES
The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:
=over
=item * reason
=back
=head1 METHODS
=head2 new
Constructor. The following data structure may be passed to new():
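A minimal illustrative sketch (the values are made up and only a subset of the
generated properties is shown; C<reason> takes a
C<Google::Ads::AdWords::v201809::ImageError::Reason> value):
 my $error = Google::Ads::AdWords::v201809::ImageError->new({
   fieldPath   => 'operations[0].operand.ad.image',
   errorString => 'ImageError.INVALID_IMAGE',
   reason      => 'INVALID_IMAGE',
 });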
=head1 AUTHOR
Generated by SOAP::WSDL
=cut
avg_line_length: 19.795276 | max_line_length: 81 | alphanum_fraction: 0.661098

hexsha: edd601f5b3aec302661320d5b43c55b52ae92390 | size: 3,849 | ext: t | lang: Perl
max_stars: test/test_hierarchy/hierarchy.t | ajaytho/dxtoolkit | 1d9948d67b9bdd6cff07dc2170d89af6d16e9de8 | ["Apache-2.0"] | count: null | dates: null
max_issues: test/test_hierarchy/hierarchy.t | ajaytho/dxtoolkit | 1d9948d67b9bdd6cff07dc2170d89af6d16e9de8 | ["Apache-2.0"] | count: null | dates: null
max_forks: test/test_hierarchy/hierarchy.t | ajaytho/dxtoolkit | 1d9948d67b9bdd6cff07dc2170d89af6d16e9de8 | ["Apache-2.0"] | count: null | dates: null
content:
use strict;
use Data::Dumper;
use Test::More tests => 5;
use Test::Script;
use LWP::UserAgent;
use lib '../../lib/';
use lib '../';
use lib '.';
use server;
my $server = server->new(8080);
$server->set_dir('landshark');
$server->host('127.0.0.1');
$server->background();
my $server1 = server->new(8082);
$server1->set_dir('Delphix32');
$server1->host('127.0.0.1');
$server1->background();
my $server2 = server->new(8083);
$server2->set_dir('Delphix33');
$server2->host('127.0.0.1');
$server2->background();
# $server->set_dir('dupazbita');
# my $ala = $server->get_dir();
#
# print Dumper $ala;
#
# exit(1);
script_compiles('../../bin/dx_get_hierarchy.pl');
script_runs(['../../bin/dx_get_hierarchy.pl', '-d', 'local', '-format','csv','-nohead'] , "All hierachy test");
my $expected_stdout = <<EOF;
local,autofs,Analytics,VDB,TESTEBI,2017-04-06 13:16:37 IST,TESTEBI,autofs
local,autotest,Analytics,VDB,Sybase dsource,2017-05-05 14:43:00 EDT,pubs3,autotest
local,mstest_lsn,Analytics,VDB,AdventureWorksLT2008R2,78000000037201000,AdventureWorksLT2008R2,mstest_lsn
local,mstest_time,Analytics,VDB,AdventureWorksLT2008R2,2017-04-24 06:28:02 PDT,AdventureWorksLT2008R2,mstest_time
local,si4rac,Analytics,VDB,racdba,2016-12-23 13:23:44 UTC,racdba,si4rac
local,siclone,Analytics,VDB,racdba,2016-12-23 13:23:44 UTC,racdba,si4rac
local,targetcon,Analytics,CDB,,N/A,targetcon,N/A
local,vPDBtest,Analytics,VDB,PDB,2017-05-22 11:17:44 EDT,PDB,vPDBtest
local,AdventureWorksLT2008R2,Sources,dSource,,N/A,AdventureWorksLT2008R2,N/A
local,Oracle dsource,Sources,dSource,,N/A,orcl,N/A
local,PDB,Sources,dSource,,N/A,PDB,N/A
local,racdba,Sources,dSource,,N/A,racdba,N/A
local,singpdb,Sources,CDB,,N/A,singpdb,N/A
local,Sybase dsource,Sources,dSource,,N/A,pubs3,N/A
local,TESTEBI,Sources,dSource,,N/A,TESTEBI,N/A
local,VOracledsource_F0C,Tests,VDB,Oracle dsource,2017-06-06 08:58:12 EDT,orcl,VOracledsource_F0C
EOF
script_stdout_is $expected_stdout, "All hierachy results compare";
script_runs(['../../bin/dx_get_hierarchy.pl', '-d', 'local33', '-format','csv','-nohead','-parent_engine','local32'] , "2 engine test");
my $expected_stdout = <<EOF;
local33,racdb\@delphix32-2,Sources\@delphix32-2,dSource,,N/A,racdb,N/A
local33,sybase1mask\@delphix32-7,Test\@delphix32-7,VDB,piorotest,2017-03-08 14:35:22 GMT,piorotest,sybase1mask
local33,maskedms\@delphix32-9,Test\@delphix32-9,VDB,tpcc,2017-03-08 17:35:00 GMT,tpcc,maskedms
local33,mask\@delphix32,Test\@delphix32,VDB,test1,2017-05-30 11:11:27 IST,test1,man
local33,cloneMSmas,Untitled,VDB,parent deleted,N/A - timeflow deleted,N/A,N/A
local33,mask1clone,Untitled,VDB,parent deleted,N/A - timeflow deleted,N/A,N/A
local33,mask1clone2,Untitled,VDB,parent deleted,N/A - timeflow deleted,N/A,N/A
local33,mask1clone3,Untitled,VDB,test1,2017-05-30 11:11:27 IST,test1,man
local33,maskclone,Untitled,VDB,parent deleted,N/A - timeflow deleted,N/A,N/A
local33,mssql2clone,Untitled,VDB,tpcc,2017-03-08 17:35:00 GMT,tpcc,maskedms
local33,sybase1clone,Untitled,VDB,piorotest,2017-03-08 14:35:22 GMT,piorotest,sybase1mask
local33,Vracdb_70C,Untitled,VDB,racdb\@delphix32-2,2016-09-28 14:57:16 UTC,racdb,Vracdb_70C
EOF
script_stdout_is $expected_stdout, "2 engine results compare";
#stop server
my $ua = LWP::UserAgent->new;
$ua->agent("Delphix Perl Agent/0.1");
$ua->timeout(15);
my $request = HTTP::Request->new(GET => 'http://127.0.0.1:8080/stop');
my $response = $ua->request($request);
#stop server
my $ua = LWP::UserAgent->new;
$ua->agent("Delphix Perl Agent/0.1");
$ua->timeout(15);
my $request = HTTP::Request->new(GET => 'http://127.0.0.1:8082/stop');
my $response = $ua->request($request);
#stop server
my $ua = LWP::UserAgent->new;
$ua->agent("Delphix Perl Agent/0.1");
$ua->timeout(15);
my $request = HTTP::Request->new(GET => 'http://127.0.0.1:8083/stop');
my $response = $ua->request($request);
avg_line_length: 37.368932 | max_line_length: 137 | alphanum_fraction: 0.739932

hexsha: ed4483c7ffa152e225575e96a6af64f9d2f775e6 | size: 18,048 | ext: pl | lang: Perl
max_stars: weblogen/backend/matcher_swi.pl | leuschel/logen | 0ea806f54628162615e25177c3ed98f6b2c27935 | ["Apache-2.0"] | count: 14 | dates: 2015-10-16T11:35:30.000Z to 2021-05-12T15:31:16.000Z
max_issues: weblogen/backend/matcher_swi.pl | leuschel/logen | 0ea806f54628162615e25177c3ed98f6b2c27935 | ["Apache-2.0"] | count: null | dates: null
max_forks: weblogen/backend/matcher_swi.pl | leuschel/logen | 0ea806f54628162615e25177c3ed98f6b2c27935 | ["Apache-2.0"] | count: 5 | dates: 2015-10-16T12:44:41.000Z to 2019-10-02T02:45:38.000Z
content:
:- module(matcher_swi, [get_syntax_positions/2,
get_ann_positions/4,
get_pos_parse_tree/3,
collect_filters/3,
collect_filters_from_list/4,
test/1]).
:- include(op_decl_swi).
:- use_module('../../annloader').
:- use_module('../../annmeta').
:- use_module('../../gximports/generalise_and_filter').
:- use_module(prolog_to_xml_swi).
:- use_module(library(memfile)).
:- dynamic correspond_ann_clause/2.
:- dynamic filter_fix/3.
test(Anns) :- get_ann_positions('match.pl', 'match.pl.ann', Anns).
get_ann_positions(Plfile, Annfile, Anns, Extras) :-
load_annfile(Annfile),
process_extras(Extras),
assert_ann_clauses,
open(Plfile, read, Stream),
match_anns(Stream, Anns),
close(Stream).
process_extras([]).
process_extras([E|Es]) :- process_extra(E), process_extras(Es).
get_unsafe_clause(Pred, Arity, ClauseNo, Clause) :-
integer(Arity),
integer(ClauseNo),
findall(ann_clause(Id, H, A), (ann_clause(Id, H, A), functor(H, Pred, Arity)), Clauses),
nth1(ClauseNo, Clauses, Clause).
% find the annotated clause Pred/Arity (number ClauseNo) and change the
% annotation pointed to by Path to Ann
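% e.g. extra(memo, p, 2, 1, Path) would change the annotation of the subgoal
% reached by Path in the first clause of p/2 to 'memo' (the names here are
% illustrative).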
process_extra(extra(Ann, Pred, Arity, ClauseNo, Path)) :-
get_unsafe_clause(Pred, Arity, ClauseNo, ann_clause(P1, H1, Clause)),
follow_path_and_change(Clause, Path, Ann, NClause, _),
retract(ann_clause(P1, H1, Clause)),
assert(ann_clause(P1, H1, NClause)).
process_extra(fixable(Ann, Pred, Arity, ClauseNo, Path, Fix)) :-
get_unsafe_clause(Pred, Arity, ClauseNo, ann_clause(P1, H1, Clause)),
follow_path_and_change(Clause, Path, Ann+Fix, NClause, _),
retract(ann_clause(P1, H1, Clause)),
assert(ann_clause(P1, H1, NClause)).
process_extra(fix_hide_nf(Pred, Arity, ClauseNo, Path)) :-
get_unsafe_clause(Pred, Arity, ClauseNo, ann_clause(P1, H1, Clause)),
follow_path_and_add(Clause, Path, fix_hide_nf, NClause),
retract(ann_clause(P1, H1, Clause)),
assert(ann_clause(P1, H1, NClause)).
process_extra(fixable_filter(Pred, Arity, Arg)) :-
assert(filter_fix(Pred/Arity, Arg, 'dynamic')).
% should perhaps throw an exception
process_extra(extra(_, P, N, _)) :- format(user_error, 'Clause ~w number ~w does not exist~n.', [P, N]).
% other extras are not relevant here, so ignore
process_extra(_).
get_index([A1, A2, A3], 1, A1, [B1, A2, A3], B1).
get_index([A1, A2, A3], 2, A2, [A1, B2, A3], B2).
get_index([A1, A2, A3], 3, A3, [A1, A2, B3], B3).
get_index([A1, A2], 1, A1, [B1, A2], B1).
get_index([A1, A2], 2, A2, [A1, B2], B2).
get_index([A1], 1, A1, [B1], B1).
% This clause matches when Fix is _, which is useful when the fix is to revert
% the clause back to its old state, when the real change happens elsewhere
%final parameter passes back the part that got changed (actually a copy)
follow_path_and_change(logen(OldAnn, A), [], Ann+Fix, logen(Ann+OldAnn, A),
logen(Ann+OldAnn, A)) :-
var(Fix).
follow_path_and_change(logen(_, A), [], Ann, logen(Ann, A), logen(Ann, A)).
follow_path_and_change(HO, [(H,I)|Path], Ann, NHO, Changed) :-
annmeta(load, HO, NHO, Args, NArgs),
functor(HO, H, _),
get_index(Args, I, Arg, NArgs, NArg),
follow_path_and_change(Arg, Path, Ann, NArg, Changed).
% follow the path and then wrap the result in Fix. Used to put hide_nf around
% something.
follow_path_and_add(A, [], Fix, NA) :-
NA =.. [Fix, A].
follow_path_and_add(HO, [(H,I)|Path], Fix, NHO) :-
annmeta(load, HO, NHO, Args, NArgs),
functor(HO, H, _),
get_index(Args, I, Arg, NArgs, NArg),
follow_path_and_add(Arg, Path, Fix, NArg).
assert_ann_clauses :-
ann_clause(_, H, AB),
convert_ann_to_source(H, AB, T),
assert(correspond_ann_clause(T, (H:-AB))),
fail.
assert_ann_clauses.
convert_ann_to_source(H, true, H) :- !.
convert_ann_to_source(H, B, (H:-T)) :-
strip_ann(B, T), !.
strip_ann(logen(_, A), _) :-
var(A), !,
throw('Free variable annotated directly. Use call(Var) instead.').
strip_ann(logen(_,(_,_)), _) :-
!,
throw('Conjunction annotated as if it was a simple predicate. To fix convert annotations of form logen(call, (_,_)) to logen(call, _), logen(call, _).').
strip_ann(logen(_, A), A).
strip_ann(fix_hide_nf(B), S) :-
strip_ann(B, S).
strip_ann(A, S) :-
annotation_matcher_data(A, S, MapFrom, MapTo),
strip_ann_list(MapFrom, MapTo).
strip_ann_list([], []).
strip_ann_list([A|As], [S|Ss]) :-
strip_ann(A, S),
strip_ann_list(As, Ss).
get_pos_parse_tree(Filename, T, PT) :-
open(Filename, read, Stream),
positional_parse(Stream, T, PT),
close(Stream).
positional_parse(Stream, Ts, PTs) :-
read_term(Stream, T, [subterm_positions(P)]),
(T == end_of_file ->
(Ts = [], PTs = [])
;
(
positional_parse(Stream, T2, PT2), !,
Ts = [T|T2], PTs = [P|PT2]
)
).
get_filter_highlights(Stream, Highlights, Extras) :-
read_term(Stream, T, [subterm_positions(P)]),
(T == end_of_file ->
Highlights = []
;
(
get_filter_highlights(Stream, H2, Extras), !,
get_highlights_from_tree(T, P, H, Extras),
append(H, H2, Highlights)
)
).
get_functor_pos_and_args(term_position(_,_,S,E,PArgs),S,E,PArgs).
get_functor_pos_and_args(S-E,S,E,[]).
% highlights bad filters with no fixes
get_highlights_from_tree(:-(filter(Filt)),
term_position(S1,E1,_,_,[term_position(_,_,S2,E2,[T])]),
[ann(S1,E1plus2,wholefilter), ann(S1, E1, badfilter),
ann(S2,E2,directive/Arity), ann(S,E,(filter)/Arity)|Highlights],
Extras) :-
functor(Filt, Name, Arity),
member(filter(Name,Arity), Extras),
!, E1plus2 is E1 + 2,get_functor_pos_and_args(T,S,E,PArgs),
Filt =.. [_|Args],
get_filter_args(Args, PArgs, Highlights, Name, Arity).
% highlights bad filters with fixes
get_highlights_from_tree(:-(filter(Filt)),
term_position(S1,E1,_,_,[term_position(_,_,S2,E2,[T])]),
[ann(S1,E1plus2,wholefilter), ann(S1, E1, badfilter*Replacement),
ann(S2,E2,directive/Arity), ann(S,E,(filter)/Arity)|Highlights],
Extras) :-
functor(Filt, Name, Arity),
member(replace_filter(Name,Arity,Replacement), Extras),
!, E1plus2 is E1 + 2,get_functor_pos_and_args(T,S,E,PArgs),
Filt =.. [_|Args],
get_filter_args(Args, PArgs, Highlights, Name, Arity).
get_highlights_from_tree(:-(filter(Filt)),
term_position(S1,E1,_,_,[term_position(_,_,S2,E2,[T])]),
[ann(S1,E1plus2,wholefilter), ann(S2,E2,directive/Arity),
ann(S,E,(filter)/Arity)|Highlights], _) :-
% S1 - E1 includes the whole term except for the '.\n', so since we know how it
% was printed we can just cheat to include it
!, E1plus2 is E1 + 2,get_functor_pos_and_args(T,S,E,PArgs),
Filt =.. [_|Args],
length(PArgs, Arity),
functor(Filt, Name, Arity),
get_filter_args(Args, PArgs, Highlights, Name, Arity).
get_highlights_from_tree(_, _, [], _).
get_filter_args(Args, PArgs, Highlights, Name, Arity) :-
%(filter_fix(Name/Arity, FixArg, FixType) -> true ; FixArg = -1),
get_filter_args2(Args, PArgs, Highlights, 1, Name/Arity).
get_filter_args2([], [], [], _, _).
get_filter_args2([A|As], [S-E|Ps], [ann(S, E, A2)|Hs], Arg, Pred) :-
get_filter_arg_highlight(A), !, Arg2 is Arg + 1,
(filter_fix(Pred, Arg, FixType) -> A2 = unsafe+FixType ; A2 = A),
get_filter_args2(As, Ps, Hs, Arg2, Pred).
get_filter_args2([_|As], [term_position(S,E,_,_,_)|Ps],
[ann(S, E, A2)|Hs], Arg, Pred) :-
(filter_fix(Pred, Arg, FixType) -> A2 = unsafe+FixType ; A2 = complex),
Arg2 is Arg + 1, get_filter_args2(As, Ps, Hs, Arg2, Pred).
get_filter_arg_highlight(X) :- basic_binding_type(X).
get_syntax_positions(Filename, Syntax) :-
open(Filename, read, Stream),
get_syntax_read(Stream, Syntax),
close(Stream).
get_syntax_read(Stream, Syntax) :-
read_term(Stream, T, [subterm_positions(P)]),
(T == end_of_file ->
Syntax = []
;
(
get_syntax_read(Stream, Syn2), !,
get_syntax_from_tree(T, P, Syn),
append(Syn, Syn2, Syntax)
)
).
get_functor_pos(S-E, S, E).
get_functor_pos(term_position(_, _, S, E, _), S, E).
get_list_pos_head(S-E, [ann(S, E, head/0)]).
get_list_pos_head(term_position(_, _, S, E, Args), [ann(S, E, head/Arity)|ListPos]) :-
!, length(Args, Arity), get_list_pos_from_list(Args, ListPos).
get_list_pos_head(T, L) :- get_list_pos(T, L).
get_list_pos(S-E, [ann(S, E, atom)]).
%get_list_pos(_-_, []).
get_list_pos(string_position(S,E), [ann(S, E, string)]).
% the following highlights like this: _{_ .... _}_
% perhaps it should do _{ ... }_
get_list_pos(brace_term_position(S,E,Arg), [ann(S, S1, brace)|List]) :-
get_list_pos(Arg, L),
S1 is S + 1, Em1 is E - 1,
append(L, [ann(Em1, E, brace)], List).
get_list_pos(list_position(Begin, End, Args, _),
[ann(Begin, Begin1, list)|ListPos]) :-
get_list_pos_from_list(Args, List),
Begin1 is Begin + 1, Endm1 is End - 1,
append(List, [ann(Endm1, End, list)], ListPos).
get_list_pos(term_position(_, _, S, E, Args), [ann(S, E, atom)|ListPos]) :-
get_list_pos_from_list(Args, ListPos).
get_list_pos_from_list([], []).
get_list_pos_from_list([H|T], ListPos) :-
get_list_pos(H, PH),
get_list_pos_from_list(T, PT),
append(PH, PT, ListPos).
get_syntax_from_tree(:-(_,_), term_position(_,_,_,_,[F, R]), ListPos) :-
!,
get_list_pos_head(F, List1),
get_list_pos(R, List2),
append(List1, List2, ListPos).
get_syntax_from_tree(_, term_position(S,_,S,E,Args), [ann(S, E, head/Arity)|ListPos]) :-
!, get_list_pos_from_list(Args, ListPos), length(Args, Arity).
get_syntax_from_tree(_, S-E, [ann(S, E, head/0)]) :- !.
get_syntax_from_tree(_, L, ListPos) :- get_list_pos(L, ListPos).
canon(A, In, Out) :- nonvar(A), !, canon2(A, In, Out).
canon(A, In, Out) :- conjunct(A, In, Out).
canon2(A, In, Out) :-
A = (B,C), !,
canon(C, In, R),
canon(B, R, Out).
canon2(A, In, Out) :-
A =.. [F|Args],
canon_list(Args, NArgs),
B =.. [F|NArgs],
conjunct(B, In, Out).
canon_list([], []).
canon_list([A|As], [B|Bs]) :-
canon(A, empty, nonempty(B)),
canon_list(As, Bs).
conjunct(A, empty, nonempty(A)).
conjunct(A, nonempty(B), nonempty((A, B))).
match_anns(Stream, Anns) :-
read_term(Stream, T, [subterm_positions(P)]),
(T == end_of_file ->
Anns = []
;
(
get_corresponding_anns(T, P, NAnns),
match_anns(Stream, Anns2), !,
get_syntax_from_tree(T, P, Syn),
append(Syn, Anns2, Anns3),
append(NAnns, Anns3, Anns)
)
).
% matches directly
get_corresponding_anns(T, P, Anns) :-
correspond_ann_clause(T, AnnClause), !,
get_anns_from_clause(P, AnnClause, Anns).
% try fuzzy match to get ones hidden by hide_nf conjunction reordering
get_corresponding_anns(T, P, Anns) :-
fuzzy_match(T, P, NP, AnnClause),
get_anns_from_clause(NP, AnnClause, Anns).
% match fails -> mark with unknowns
get_corresponding_anns(T, P, Anns) :-
annotate_unknown_clause(T, P, Anns).
annotate_unknown_clause((:-_), _, []).
annotate_unknown_clause((_:-T), term_position(_, _, _, _, [_, PT]), Anns) :-
annotate_unknown(T, PT, Anns).
annotate_unknown_clause(_, _, []).
annotate_unknown(T, P, Anns) :-
term_to_position(T, P, Pos, TMap, PMap, Unknown),
(Pos = [S,E] -> Anns = [ann(S,E,Unknown)|NAnns] ; Anns = NAnns),
annotate_unknown_list(TMap, PMap, NAnns).
annotate_unknown_list([], [], []).
annotate_unknown_list([T|Ts], [P|Ps], Anns) :-
annotate_unknown(T, P, A),
annotate_unknown_list(Ts, Ps, As),
append(A, As, Anns).
get_anns_from_clause(term_position(_, _, _, _, Args), (_:-B), NAnns) :-
!, get_anns_from_body(Args, B, NAnns).
get_anns_from_clause(_P, _AnnClause, NAnns) :-
%portray_clause(P),
%portray_clause(AnnClause),
NAnns = [].
get_anns_from_body(_, true, []) :- !.
get_anns_from_body([_,A], B, Anns) :- !,
%portray_clause(A),
%portray_clause(B),
get_anns_from_body2(A, B, Anns).
get_anns_from_body2(P, fix_hide_nf(A), [ann(S,E,fix_hide_nf)|Anns2]) :-
ann_to_position(hide_nf(A), P, As, Ps, _, [S,E], _),
get_anns_from_body2_list(Ps, As, Anns2).
get_anns_from_body2(P, AnnClause, Anns) :-
ann_to_position(AnnClause, P, As, Ps, Ann, Pos, Arity),
(Pos = [S,E] -> Anns = [ann(S,E,Ann/Arity)|Anns2] ; Anns = Anns2),
get_anns_from_body2_list(Ps, As, Anns2).
get_anns_from_body2_list([], [], []).
get_anns_from_body2_list([P|Ps], [A|As], Anns) :-
get_anns_from_body2(P, A, Anns1),
get_anns_from_body2_list(Ps, As, Anns2),
append(Anns1, Anns2, Anns).
% arg 1 is an open stream containing the filters
% (likely to be a memory file although it could be something else)
% arg 2 is the formatting for it
% arg 3 contains info for extra highlighting
collect_filters(ReturnStream, FilSyntax, Extras) :-
new_memory_file(TmpFile),
open_memory_file(TmpFile, write, TmpOut),
write_filters(TmpOut),
close(TmpOut),
open_memory_file(TmpFile, read, TmpIn),
get_filter_highlights(TmpIn, FilSyntax, Extras),
close(TmpIn),
open_memory_file(TmpFile, read, ReturnStream).
% this is the same as above but the filters are contained in a list (Filters)
% and aren't extracted from the ann_decl db.
collect_filters_from_list(ReturnStream, FilSyntax, Extras, Filters) :-
new_memory_file(TmpFile),
open_memory_file(TmpFile, write, TmpOut),
write_filters_from_list(TmpOut, Filters),
close(TmpOut),
open_memory_file(TmpFile, read, TmpIn),
get_filter_highlights(TmpIn, FilSyntax, Extras),
close(TmpIn),
open_memory_file(TmpFile, read, ReturnStream).
list([]).
list([_|As]) :- list(As).
prec(X, 0) :- var(X), !.
prec(type(_), 0) :- !.
prec(X, 0) :- atomic(X), !.
prec(X, 0) :- list(X), !.
prec(X, P) :- functor(X, Op, _), current_op(P, _, Op), !.
prec(_, 0).
prefix(fx).
prefix(fy).
args(fx, [x]).
args(fy, [y]).
args(xf, [x]).
args(yf, [y]).
args(xfx, [x,x]).
args(xfy, [x,y]).
args(yfx, [y,x]).
args(yfy, [y,y]).
% bracket if necessary
bpp(S, X, P1, P2, x) :- P2 < P1, !, pp(S, X).
bpp(S, X, P1, P2, y) :- P2 = P1, !, pp(S, X).
bpp(S, X, _, _, _) :- print(S, '('), pp(S, X), print(S, ')').
% have to do all the work here because SWI portrays dynamic as (dynamic)!
% this is incomplete! It will probably output the wrong thing if you use
% operators that require inner brackets to maintain precedence
# OTOH it appears to work with the examples I have...
pp(S, X) :- var(X),!, print(S, X).
pp(S, X) :- atomic(X),!, writeq(S, X).
pp(S, X) :- list(X),!, print(S, '['), l_pp(S, X), print(S, ']').
%pp(S, '--->'(X, Def) ) :-
% !, pp(S, X), print(S, ' --> '), pp(S, Def).
% don't print type as an operator here even though it is.
pp(S, type(X)) :- !, print(S,'type('), pp(S, X), print(S, ')').
pp(S, X) :- X=..[Op,Arg], current_op(P1, Type, Op), args(Type, [T]), !, prec(Arg, P2),
(prefix(Type) ->
print(S, Op), bpp(S, Arg, P1, P2, T)
; bpp(S, Arg, P1, P2, T), print(S, Op)).
pp(S, X) :- X=..[Op,A1,A2], current_op(P, Type, Op), args(Type, [T1, T2]), !, prec(A1, P1), prec(A2, P2),
bpp(S, A1, P, P1, T1), print(S, ' '), print(S, Op), print(S, ' '), bpp(S, A2, P, P2, T2).
pp(S, X) :- X=..[Op|Args], writeq(S, Op), print(S, '('), l_pp(S, Args), print(S, ')').
l_pp(_, []).
l_pp(S, [H]) :- !,pp(S, H).
l_pp(S, [H|T]) :- pp(S, H), print(S, ','), l_pp(S, T).
write_decl(_, module, _) :- !.
write_decl(Stream, Id, Decl) :-
format(Stream, ':- ~w~n ', [Id]), pp(Stream, Decl),
write(Stream, '.'), nl(Stream).
write_filters(Stream) :-
ann_decl(Id, Decl),
display_in_filters(Id),
write_decl(Stream, Id, Decl),
fail.
write_filters(_).
write_filters_from_list(_, []).
write_filters_from_list(Stream, [ann_decl(Id, Decl)|Fs]) :-
display_in_filters(Id),
write_decl(Stream, Id, Decl),
write_filters_from_list(Stream, Fs).
fuzzy_match((H:-B), term_position(S, E, FS, FE, [PF, PB]),
term_position(S, E, FS, FE, [PF, NPB]), AnnClause) :-
functor(B, Name, Arity),
functor(FB, Name, Arity), !,
correspond_ann_clause((H:-FB), (H:-AnnClause)),
try_match(B, FB, PB, NPB, AnnClause), !.
%try_match(Body, FBody, PBody, NPBody, AnnClause).
try_match(B, B, PB, PB, logen(_,B)).
try_match((C1, C2), FB, PB, NPB, AnnClause) :-
match_conj((C1, C2), FB, PB, NPB, AnnClause).
try_match((A->B;C), (FA->FB;FC), PB, NPB, AnnClause) :-
match_if((A,B,C), (FA,FB,FC), PB, NPB, AnnClause).
try_match(A, FA, PB, NPB, hide_nf(AnnClause)) :-
try_match(A, FA, PB, NPB, AnnClause).
try_match(A, FA, PB, NPB, fix_hide_nf(AnnClause)) :-
try_match(A, FA, PB, NPB, AnnClause).
extract_if(if(A,B,C), A, B, C).
extract_if(resif(A,B,C), A, B, C).
match_if((A,B,C), (FA, FB, FC),
term_position(S, E, CS, CE, [term_position(S1,E1,CS1,CE1,[P1,P2]),P3]),
term_position(S, E, CS, CE, [term_position(S1,E1,CS1,CE1,[NP1,NP2]),NP3]),
AnnClause) :-
!, extract_if(AnnClause, A1, A2, A3),
try_match(A, FA, P1, NP1, A1),
try_match(B, FB, P2, NP2, A2),
try_match(C, FC, P3, NP3, A3).
match_conj(C1, FC, PB, NPB, (hide_nf((A,B)), C)) :-
!, count_conjs((A,B), Count),
reorder(C1, Count, RC1, RC2, PB, term_position(S, E, CS, CE, [P1, P2])),
try_match(RC1, FC1, P1, NP1, (A,B)),
try_match(RC2, FC2, P2, NP2, C),
NPB = term_position(S, E, CS, CE, [NP1, NP2]),
FC = (FC1, FC2).
% exact copy of above one but hide_nf -> fix_hide_nf
match_conj(C1, FC, PB, NPB, (fix_hide_nf((A,B)), C)) :-
!, count_conjs((A,B), Count),
reorder(C1, Count, RC1, RC2, PB, term_position(S, E, CS, CE, [P1, P2])),
try_match(RC1, FC1, P1, NP1, (A,B)),
try_match(RC2, FC2, P2, NP2, C),
NPB = term_position(S, E, CS, CE, [NP1, NP2]),
FC = (FC1, FC2).
match_conj((C1, C2), (FC1, FC2), term_position(S,E,CS,CE,[P1,P2]),
term_position(S,E,CS,CE,[NP1, NP2]),
(A1, A2)) :-
try_match(C1, FC1, P1, NP1, A1),
try_match(C2, FC2, P2, NP2, A2).
% takes code of form (a1, a2, ...an, b) and turns it into ((a1, a2, ...an), b)
% also changes the term_position information
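% e.g. with Count = 1 a conjunction (a, (b, c)) is regrouped as ((a, b), c),
% and the accompanying term_position structure is rearranged to match.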
reorder(Conj, 0, A, B, PB, PB) :-
nonvar(Conj), Conj = (A,B).
reorder(Conj, X, (A, C), D, PB, NPB) :-
nonvar(Conj), Conj = (A,B), X1 is X - 1,
PB = term_position(S, E, CS, CE,
[P1, term_position(S1, E1, CS1, CE1, [P2, P3])]),
reorder(B, X1, C, D, P3, NP3),
NPB = term_position(S, E, CS1, CE1,
[term_position(S1, E1, CS, CE, [P1, P2]), NP3]).
% Assume first arg is always nonvar
count_conjs((A,B), Count) :-
!, count_conjs(A, C1), count_conjs(B, C2),
Count is 1 + C1 + C2.
count_conjs(hide_nf(A), Count) :-
!, count_conjs(A, Count).
count_conjs(fix_hide_nf(A), Count) :-
!, count_conjs(A, Count).
count_conjs(_, 0).
avg_line_length: 33.734579 | max_line_length: 154 | alphanum_fraction: 0.656859

hexsha: ed3cc8e1a28a2256238ee7645a40f9ed48dccaf9 | size: 3,984 | ext: t | lang: Perl
max_stars: tests/list_file_types.t | boriskro/the_silver_searcher | d2e8f1c2d73ddab3a7c7175bb81191ab8a092e98 | ["Apache-2.0"] | count: null | dates: null
max_issues: tests/list_file_types.t | boriskro/the_silver_searcher | d2e8f1c2d73ddab3a7c7175bb81191ab8a092e98 | ["Apache-2.0"] | count: null | dates: null
max_forks: tests/list_file_types.t | boriskro/the_silver_searcher | d2e8f1c2d73ddab3a7c7175bb81191ab8a092e98 | ["Apache-2.0"] | count: null | dates: null
content:
Setup:
$ . $TESTDIR/setup.sh
Language types are output:
$ ag --list-file-types
The following file types are supported:
--actionscript
.as .mxml
--ada
.ada .adb .ads
--asm
.asm .s
--batch
.bat .cmd
--bitbake
.bb .bbappend .bbclass .inc
--bro
.bro .bif
--cc
.c .h .xs
--cfmx
.cfc .cfm .cfml
--chpl
.chpl
--clojure
.clj .cljs .cljc .cljx
--coffee
.coffee .cjsx
--cpp
.cpp .cc .C .cxx .m .hpp .hh .h .H .hxx .tpp
--crystal
.cr .ecr
--csharp
.cs
--css
.css
--cython
.pyx .pxd .pxi
--delphi
.pas .int .dfm .nfm .dof .dpk .dpr .dproj .groupproj .bdsgroup .bdsproj
--ebuild
.ebuild .eclass
--elisp
.el
--elixir
.ex .eex .exs
--erlang
.erl .hrl
--factor
.factor
--fortran
.f .f77 .f90 .f95 .f03 .for .ftn .fpp
--fsharp
.fs .fsi .fsx
--gettext
.po .pot .mo
--glsl
.vert .tesc .tese .geom .frag .comp
--go
.go
--groovy
.groovy .gtmpl .gpp .grunit .gradle
--haml
.haml
--haskell
.hs .lhs
--hh
.h
--html
.htm .html .shtml .xhtml
--ini
.ini
--jade
.jade
--java
.java .properties
--js
.js .jsx .vue
--json
.json
--jsp
.jsp .jspx .jhtm .jhtml
--julia
.jl
--kotlin
.kt
--less
.less
--liquid
.liquid
--lisp
.lisp .lsp
--log
.log
--lua
.lua
--m4
.m4
--make
.Makefiles .mk .mak
--mako
.mako
--markdown
.markdown .mdown .mdwn .mkdn .mkd .md
--mason
.mas .mhtml .mpl .mtxt
--matlab
.m
--mathematica
.m .wl
--mercury
.m .moo
--nim
.nim
--objc
.m .h
--objcpp
.mm .h
--ocaml
.ml .mli .mll .mly
--octave
.m
--parrot
.pir .pasm .pmc .ops .pod .pg .tg
--perl
.pl .pm .pm6 .pod .t
--php
.php .phpt .php3 .php4 .php5 .phtml
--pike
.pike .pmod
--plone
.pt .cpt .metadata .cpy .py .xml .zcml
--proto
.proto
--puppet
.pp
--python
.py
--qml
.qml
--racket
.rkt .ss .scm
--rake
.Rakefile
--restructuredtext
.rst
--rs
.rs
--r
.R .Rmd .Rnw .Rtex .Rrst
--rdoc
.rdoc
--ruby
.rb .rhtml .rjs .rxml .erb .rake .spec
--rust
.rs
--salt
.sls
--sass
.sass .scss
--scala
.scala
--scheme
.scm .ss
--shell
.sh .bash .csh .tcsh .ksh .zsh .fish
--smalltalk
.st
--sml
.sml .fun .mlb .sig
--sql
.sql .ctl
--stylus
.styl
--swift
.swift
--tcl
.tcl .itcl .itk
--tex
.tex .cls .sty
--tt
.tt .tt2 .ttml
--toml
.toml
--ts
.ts .tsx
--vala
.vala .vapi
--vb
.bas .cls .frm .ctl .vb .resx
--velocity
.vm .vtl .vsl
--verilog
.v .vh .sv
--vhdl
.vhd .vhdl
--vim
.vim
--wix
.wxi .wxs
--wsdl
.wsdl
--wadl
.wadl
--xml
.xml .dtd .xsl .xslt .ent .tld
--yaml
.yaml .yml
avg_line_length: 12.769231 | max_line_length: 89 | alphanum_fraction: 0.360442

hexsha: ed7ffd0efaa059ee42ea1fd6bc4dfff9207d75e9 | size: 1,969 | ext: t | lang: Perl
max_stars: t/gen.record.3.t | graphviz-perl/GraphViz2 | 5824099a0db85478515787c9300cc016915ad9a9 | ["Artistic-1.0"] | count: 4 | dates: 2020-11-09T10:15:20.000Z to 2021-10-18T14:17:26.000Z
max_issues: t/gen.record.3.t | ronsavage/GraphViz2 | 5824099a0db85478515787c9300cc016915ad9a9 | ["Artistic-1.0"] | count: 7 | dates: 2016-10-19T14:09:16.000Z to 2020-10-13T12:59:18.000Z
max_forks: t/gen.record.3.t | graphviz-perl/GraphViz2 | 5824099a0db85478515787c9300cc016915ad9a9 | ["Artistic-1.0"] | count: 8 | dates: 2015-11-04T23:40:23.000Z to 2020-05-27T16:54:24.000Z
content:
# Annotation: Deeply nested records using nested array-refs.
use strict;
use warnings;
use File::Spec;
use GraphViz2;
my $id = '3';
my $graph = GraphViz2->new(
global => {directed => 1, combine_node_and_port => 0, record_shape => 'record'},
graph => {
label => "Record demo $id - Deeply nested records " .
"using nested array-refs"
},
);
$graph->add_node(name => 'Alphabet', label => [
{ port => 'port_a', text => 'a:port_a' },
[
{ port => 'port_b', text => 'b:port_b' },
'c',
[
{ port => 'port_d', text => 'd:port_d' },
'e',
'f',
[
'g',
{ port => 'port_h', text => 'h:port_h' },
'i',
'j',
[
'k',
'l',
'm',
{ port => 'port_n', text => 'n:port_n' },
'o',
'p',
],
'q',
'r',
{ port => 'port_s', text => 's:port_s' },
't',
],
'u',
'v',
{ port => 'port_w', text => 'w:port_w' },
],
'x',
{ port => 'port_y', text => 'y:port_y' },
],
'z',
]);
$graph -> add_edge(
from => 'Alphabet', tailport => 'port_a', to => 'Alphabet', headport => 'port_n', color => 'maroon',
);
$graph -> add_edge(
from => 'Alphabet', tailport => 'port_b', to => 'Alphabet', headport => 'port_s', color => 'blue',
);
$graph -> add_edge(
from => 'Alphabet', tailport => 'port_d', to => 'Alphabet', headport => 'port_w', color => 'red',
);
$graph -> add_edge(
from => 'Alphabet', tailport => 'port_y', to => 'Alphabet', headport => 'port_h', color => 'green',
);
if (@ARGV) {
my($format) = shift || 'svg';
my($output_file) = shift || File::Spec -> catfile('html', "record.$id.$format");
$graph -> run(format => $format, output_file => $output_file);
} else {
# run as a test
require Test::More;
require Test::Snapshot;
Test::Snapshot::is_deeply_snapshot($graph->dot_input, 'dot file');
Test::More::done_testing();
}
avg_line_length: 25.24359 | max_line_length: 102 | alphanum_fraction: 0.493652

hexsha: edc4657f916c765f75b7ec7e329f1762604f0a5b | size: 32,533 | ext: pm | lang: Perl
max_stars: code/exiftool/lib/Image/ExifTool/PanasonicRaw.pm | timlawrenz/depot-photos | a3958c938ddd66acd6f456daec5b31a17264778a | ["Apache-2.0"] | count: 91 | dates: 2015-01-02T11:07:08.000Z to 2022-02-15T05:47:08.000Z
max_issues: bin/lib/Image/ExifTool/PanasonicRaw.pm | AndrewHaluza/exif-manager | 437c8a56ff9eb812c09f68518ad2e3b562942585 | ["MIT"] | count: 29 | dates: 2015-08-02T02:46:08.000Z to 2022-03-11T09:52:01.000Z
max_forks: bin/lib/Image/ExifTool/PanasonicRaw.pm | AndrewHaluza/exif-manager | 437c8a56ff9eb812c09f68518ad2e3b562942585 | ["MIT"] | count: 18 | dates: 2015-01-02T11:10:53.000Z to 2021-03-29T13:01:14.000Z
content:
#------------------------------------------------------------------------------
# File: PanasonicRaw.pm
#
# Description: Read/write Panasonic/Leica RAW/RW2/RWL meta information
#
# Revisions: 2009/03/24 - P. Harvey Created
# 2009/05/12 - PH Added RWL file type (same format as RW2)
#
# References: 1) https://exiftool.org/forum/index.php/topic,1542.0.html
# 2) http://www.cybercom.net/~dcoffin/dcraw/
# 3) http://syscall.eu/#pana
# 4) Klaus Homeister private communication
# IB) Iliah Borg private communication (LibRaw)
# JD) Jens Duttke private communication (TZ3,FZ30,FZ50)
#------------------------------------------------------------------------------
package Image::ExifTool::PanasonicRaw;
use strict;
use vars qw($VERSION);
use Image::ExifTool qw(:DataAccess :Utils);
use Image::ExifTool::Exif;
$VERSION = '1.25';
sub ProcessJpgFromRaw($$$);
sub WriteJpgFromRaw($$$);
sub WriteDistortionInfo($$$);
sub ProcessDistortionInfo($$$);
my %jpgFromRawMap = (
IFD1 => 'IFD0',
EXIF => 'IFD0', # to write EXIF as a block
ExifIFD => 'IFD0',
GPS => 'IFD0',
SubIFD => 'IFD0',
GlobParamIFD => 'IFD0',
PrintIM => 'IFD0',
InteropIFD => 'ExifIFD',
MakerNotes => 'ExifIFD',
IFD0 => 'APP1',
MakerNotes => 'ExifIFD',
Comment => 'COM',
);
my %wbTypeInfo = (
PrintConv => \%Image::ExifTool::Exif::lightSource,
SeparateTable => 'EXIF LightSource',
);
my %panasonicWhiteBalance = ( #forum9396
0 => 'Auto',
1 => 'Daylight',
2 => 'Cloudy',
3 => 'Tungsten',
4 => 'n/a',
5 => 'Flash',
6 => 'n/a',
7 => 'n/a',
8 => 'Custom#1',
9 => 'Custom#2',
10 => 'Custom#3',
11 => 'Custom#4',
12 => 'Shade',
13 => 'Kelvin',
16 => 'AWBc', # GH5 and G9 (Makernotes WB==19)
);
# Tags found in Panasonic RAW/RW2/RWL images (ref PH)
%Image::ExifTool::PanasonicRaw::Main = (
GROUPS => { 0 => 'EXIF', 1 => 'IFD0', 2 => 'Image'},
WRITE_PROC => \&Image::ExifTool::Exif::WriteExif,
CHECK_PROC => \&Image::ExifTool::Exif::CheckExif,
WRITE_GROUP => 'IFD0', # default write group
NOTES => 'These tags are found in IFD0 of Panasonic/Leica RAW, RW2 and RWL images.',
0x01 => {
Name => 'PanasonicRawVersion',
Writable => 'undef',
},
0x02 => 'SensorWidth', #1/PH
0x03 => 'SensorHeight', #1/PH
0x04 => 'SensorTopBorder', #JD
0x05 => 'SensorLeftBorder', #JD
0x06 => 'SensorBottomBorder', #PH
0x07 => 'SensorRightBorder', #PH
# observed values for unknown tags - PH
# 0x08: 1
# 0x09: 1,3,4
# 0x0a: 12
# (IB gave 0x08-0x0a as BlackLevel tags, but Klaus' decoding makes more sense)
0x08 => { Name => 'SamplesPerPixel', Writable => 'int16u', Protected => 1 }, #4
0x09 => { #4
Name => 'CFAPattern',
Writable => 'int16u',
Protected => 1,
PrintConv => {
0 => 'n/a',
1 => '[Red,Green][Green,Blue]', # (CM-1, FZ70)
2 => '[Green,Red][Blue,Green]', # (LX-7)
3 => '[Green,Blue][Red,Green]', # (ZS100, FZ2500, FZ1000, ...)
4 => '[Blue,Green][Green,Red]', # (LC-100, G-7, V-LUX1, ...)
},
},
0x0a => { Name => 'BitsPerSample', Writable => 'int16u', Protected => 1 }, #4
0x0b => { #4
Name => 'Compression',
Writable => 'int16u',
Protected => 1,
PrintConv => {
34316 => 'Panasonic RAW 1', # (most models - RAW/RW2/RWL)
34826 => 'Panasonic RAW 2', # (DIGILUX 2 - RAW)
34828 => 'Panasonic RAW 3', # (D-LUX2,D-LUX3,FZ30,LX1 - RAW)
34830 => 'Panasonic RAW 4', #IB (Leica DIGILUX 3, Panasonic DMC-L1)
},
},
# 0x0c: 2 (only Leica Digilux 2)
# 0x0d: 0,1
# 0x0e,0x0f,0x10: 4095
0x0e => { Name => 'LinearityLimitRed', Writable => 'int16u' }, #IB
0x0f => { Name => 'LinearityLimitGreen', Writable => 'int16u' }, #IB
0x10 => { Name => 'LinearityLimitBlue', Writable => 'int16u' }, #IB
0x11 => { #JD
Name => 'RedBalance',
Writable => 'int16u',
ValueConv => '$val / 256',
ValueConvInv => 'int($val * 256 + 0.5)',
Notes => 'found in Digilux 2 RAW images',
},
0x12 => { #JD
Name => 'BlueBalance',
Writable => 'int16u',
ValueConv => '$val / 256',
ValueConvInv => 'int($val * 256 + 0.5)',
},
0x13 => { #IB
Name => 'WBInfo',
SubDirectory => { TagTable => 'Image::ExifTool::PanasonicRaw::WBInfo' },
},
0x17 => { #1
Name => 'ISO',
Writable => 'int16u',
},
# 0x18,0x19,0x1a: 0
0x18 => { #IB
Name => 'HighISOMultiplierRed',
Writable => 'int16u',
ValueConv => '$val / 256',
ValueConvInv => 'int($val * 256 + 0.5)',
},
0x19 => { #IB
Name => 'HighISOMultiplierGreen',
Writable => 'int16u',
ValueConv => '$val / 256',
ValueConvInv => 'int($val * 256 + 0.5)',
},
0x1a => { #IB
Name => 'HighISOMultiplierBlue',
Writable => 'int16u',
ValueConv => '$val / 256',
ValueConvInv => 'int($val * 256 + 0.5)',
},
    # 0x1b: [binary data] (something to do with the camera ISO capabilities: int16u count N,
# followed by table of N entries: int16u ISO, int16u[3] RGB gains - ref IB)
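    #       (an illustrative way to unpack such a blob, assuming little-endian storage:
    #          my ($n, @vals) = unpack('v v*', $data);  # then group @vals into 4-value entries
    #        the endianness and exact layout are assumptions, not confirmed here)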
0x1c => { Name => 'BlackLevelRed', Writable => 'int16u' }, #IB
0x1d => { Name => 'BlackLevelGreen', Writable => 'int16u' }, #IB
0x1e => { Name => 'BlackLevelBlue', Writable => 'int16u' }, #IB
0x24 => { #2
Name => 'WBRedLevel',
Writable => 'int16u',
},
0x25 => { #2
Name => 'WBGreenLevel',
Writable => 'int16u',
},
0x26 => { #2
Name => 'WBBlueLevel',
Writable => 'int16u',
},
0x27 => { #IB
Name => 'WBInfo2',
SubDirectory => { TagTable => 'Image::ExifTool::PanasonicRaw::WBInfo2' },
},
# 0x27,0x29,0x2a,0x2b,0x2c: [binary data]
0x2d => { #IB
Name => 'RawFormat',
Writable => 'int16u',
Protected => 1,
# 2 - RAW DMC-FZ8/FZ18
# 3 - RAW DMC-L10
# 4 - RW2 for most other models, including G9 in "pixel shift off" mode and YUNEEC CGO4
# (must add 15 to black levels for RawFormat == 4)
# 5 - RW2 DC-GH5s; G9 in "pixel shift on" mode
# 6 - RW2 DC-S1, DC-S1r in "pixel shift off" mode
# 7 - RW2 DC-S1r (and probably DC-S1, have no raw samples) in "pixel shift on" mode
# not used - DMC-LX1/FZ30/FZ50/L1/LX1/LX2
# (modes 5 and 7 are lossless)
},
0x2e => { #JD
Name => 'JpgFromRaw', # (writable directory!)
Groups => { 2 => 'Preview' },
Writable => 'undef',
# protect this tag because it contains all the metadata
Flags => [ 'Binary', 'Protected', 'NestedHtmlDump', 'BlockExtract' ],
Notes => 'processed as an embedded document because it contains full EXIF',
WriteCheck => '$val eq "none" ? undef : $self->CheckImage(\$val)',
DataTag => 'JpgFromRaw',
RawConv => '$self->ValidateImage(\$val,$tag)',
SubDirectory => {
# extract information from embedded image since it is metadata-rich,
# unless HtmlDump option set (note that the offsets will be relative,
# not absolute like they should be in verbose mode)
TagTable => 'Image::ExifTool::JPEG::Main',
WriteProc => \&WriteJpgFromRaw,
ProcessProc => \&ProcessJpgFromRaw,
},
},
0x2f => { Name => 'CropTop', Writable => 'int16u' },
0x30 => { Name => 'CropLeft', Writable => 'int16u' },
0x31 => { Name => 'CropBottom', Writable => 'int16u' },
0x32 => { Name => 'CropRight', Writable => 'int16u' },
0x10f => {
Name => 'Make',
Groups => { 2 => 'Camera' },
Writable => 'string',
DataMember => 'Make',
# save this value as an ExifTool member variable
RawConv => '$self->{Make} = $val',
},
0x110 => {
Name => 'Model',
Description => 'Camera Model Name',
Groups => { 2 => 'Camera' },
Writable => 'string',
DataMember => 'Model',
# save this value as an ExifTool member variable
RawConv => '$self->{Model} = $val',
},
0x111 => {
Name => 'StripOffsets',
# (this value is 0xffffffff for some models, and RawDataOffset must be used)
Flags => [ 'IsOffset', 'PanasonicHack' ],
OffsetPair => 0x117, # point to associated byte counts
ValueConv => 'length($val) > 32 ? \$val : $val',
},
0x112 => {
Name => 'Orientation',
Writable => 'int16u',
PrintConv => \%Image::ExifTool::Exif::orientation,
Priority => 0, # so IFD1 doesn't take precedence
},
0x116 => {
Name => 'RowsPerStrip',
Priority => 0,
},
0x117 => {
Name => 'StripByteCounts',
# (note that this value may represent something like uncompressed byte count
# for RAW/RW2/RWL images from some models, and is zero for some other models)
OffsetPair => 0x111, # point to associated offset
ValueConv => 'length($val) > 32 ? \$val : $val',
},
0x118 => {
Name => 'RawDataOffset', #PH (RW2/RWL)
IsOffset => '$$et{TIFF_TYPE} =~ /^(RW2|RWL)$/', # (invalid in DNG-converted files)
PanasonicHack => 1,
OffsetPair => 0x117, # (use StripByteCounts as the offset pair)
NotRealPair => 1, # (to avoid Validate warning)
},
0x119 => {
Name => 'DistortionInfo',
SubDirectory => { TagTable => 'Image::ExifTool::PanasonicRaw::DistortionInfo' },
},
# 0x11b - chromatic aberration correction (ref 3) (also see forum9366)
0x11c => { #forum9373
Name => 'Gamma',
Writable => 'int16u',
# unfortunately it seems that the scaling factor varies with model...
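        # e.g. a stored value of 2200 decodes as 2200/1024 = ~2.15, while a stored
        # value of 220 decodes as 220/100 = 2.2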
ValueConv => '$val / ($val >= 1024 ? 1024 : ($val >= 256 ? 256 : 100))',
ValueConvInv => 'int($val * 256 + 0.5)',
},
0x120 => {
Name => 'CameraIFD',
SubDirectory => {
TagTable => 'Image::ExifTool::PanasonicRaw::CameraIFD',
Base => '$start',
ProcessProc => \&Image::ExifTool::ProcessTIFF,
},
},
0x121 => { #forum9295
Name => 'Multishot',
Writable => 'int32u',
PrintConv => {
0 => 'Off',
65536 => 'Pixel Shift',
},
},
# 0x122 - int32u: RAWDataOffset for the GH5s/GX9, or pointer to end of raw data for G9 (forum9295)
0x2bc => { # PH Extension!!
Name => 'ApplicationNotes', # (writable directory!)
Writable => 'int8u',
Format => 'undef',
Flags => [ 'Binary', 'Protected' ],
SubDirectory => {
DirName => 'XMP',
TagTable => 'Image::ExifTool::XMP::Main',
},
},
0x001b => { #forum9250
Name => 'NoiseReductionParams',
Writable => 'undef',
Format => 'int16u',
Count => -1,
Flags => 'Protected',
Notes => q{
the camera's default noise reduction setup. The first number is the number
of entries, then for each entry there are 4 numbers: an ISO speed, and
            noise-reduction strengths for the R, G and B channels
},
},
0x83bb => { # PH Extension!!
Name => 'IPTC-NAA', # (writable directory!)
Format => 'undef', # convert binary values as undef
Writable => 'int32u', # but write int32u format code in IFD
WriteGroup => 'IFD0',
Flags => [ 'Binary', 'Protected' ],
SubDirectory => {
DirName => 'IPTC',
TagTable => 'Image::ExifTool::IPTC::Main',
},
},
0x8769 => {
Name => 'ExifOffset',
Groups => { 1 => 'ExifIFD' },
Flags => 'SubIFD',
SubDirectory => {
TagTable => 'Image::ExifTool::Exif::Main',
DirName => 'ExifIFD',
Start => '$val',
},
},
0x8825 => {
Name => 'GPSInfo',
Groups => { 1 => 'GPS' },
Flags => 'SubIFD',
SubDirectory => {
DirName => 'GPS',
TagTable => 'Image::ExifTool::GPS::Main',
Start => '$val',
},
},
# 0xffff => 'DCSHueShiftValues', #exifprobe (NC)
);
# white balance information (ref IB)
# (PanasonicRawVersion<200: Digilux 2)
%Image::ExifTool::PanasonicRaw::WBInfo = (
PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData,
WRITE_PROC => \&Image::ExifTool::WriteBinaryData,
CHECK_PROC => \&Image::ExifTool::CheckBinaryData,
WRITABLE => 1,
FORMAT => 'int16u',
FIRST_ENTRY => 0,
0 => 'NumWBEntries',
1 => { Name => 'WBType1', %wbTypeInfo },
2 => { Name => 'WB_RBLevels1', Format => 'int16u[2]' },
4 => { Name => 'WBType2', %wbTypeInfo },
5 => { Name => 'WB_RBLevels2', Format => 'int16u[2]' },
7 => { Name => 'WBType3', %wbTypeInfo },
8 => { Name => 'WB_RBLevels3', Format => 'int16u[2]' },
10 => { Name => 'WBType4', %wbTypeInfo },
11 => { Name => 'WB_RBLevels4', Format => 'int16u[2]' },
13 => { Name => 'WBType5', %wbTypeInfo },
14 => { Name => 'WB_RBLevels5', Format => 'int16u[2]' },
16 => { Name => 'WBType6', %wbTypeInfo },
17 => { Name => 'WB_RBLevels6', Format => 'int16u[2]' },
19 => { Name => 'WBType7', %wbTypeInfo },
20 => { Name => 'WB_RBLevels7', Format => 'int16u[2]' },
);
# white balance information (ref IB)
# (PanasonicRawVersion>=200: D-Lux2, D-Lux3, DMC-FZ18/FZ30/LX1/L10)
%Image::ExifTool::PanasonicRaw::WBInfo2 = (
PROCESS_PROC => \&Image::ExifTool::ProcessBinaryData,
WRITE_PROC => \&Image::ExifTool::WriteBinaryData,
CHECK_PROC => \&Image::ExifTool::CheckBinaryData,
WRITABLE => 1,
FORMAT => 'int16u',
FIRST_ENTRY => 0,
0 => 'NumWBEntries',
1 => { Name => 'WBType1', %wbTypeInfo },
2 => { Name => 'WB_RGBLevels1', Format => 'int16u[3]' },
5 => { Name => 'WBType2', %wbTypeInfo },
6 => { Name => 'WB_RGBLevels2', Format => 'int16u[3]' },
9 => { Name => 'WBType3', %wbTypeInfo },
10 => { Name => 'WB_RGBLevels3', Format => 'int16u[3]' },
13 => { Name => 'WBType4', %wbTypeInfo },
14 => { Name => 'WB_RGBLevels4', Format => 'int16u[3]' },
17 => { Name => 'WBType5', %wbTypeInfo },
18 => { Name => 'WB_RGBLevels5', Format => 'int16u[3]' },
21 => { Name => 'WBType6', %wbTypeInfo },
22 => { Name => 'WB_RGBLevels6', Format => 'int16u[3]' },
25 => { Name => 'WBType7', %wbTypeInfo },
26 => { Name => 'WB_RGBLevels7', Format => 'int16u[3]' },
);
# lens distortion information (ref 3)
# (distortion correction equation: Ru = scale*(Rd + a*Rd^3 + b*Rd^5 + c*Rd^7), ref 3)
%Image::ExifTool::PanasonicRaw::DistortionInfo = (
PROCESS_PROC => \&ProcessDistortionInfo,
WRITE_PROC => \&WriteDistortionInfo,
CHECK_PROC => \&Image::ExifTool::CheckBinaryData,
# (don't make this family 0 MakerNotes because we don't want it to be a deletable group)
GROUPS => { 0 => 'PanasonicRaw', 1 => 'PanasonicRaw', 2 => 'Image'},
WRITABLE => 1,
FORMAT => 'int16s',
FIRST_ENTRY => 0,
NOTES => 'Lens distortion correction information.',
# 0,1 - checksums
2 => {
Name => 'DistortionParam02',
ValueConv => '$val / 32768',
ValueConvInv => '$val * 32768',
},
# 3 - usually 0, but seen 0x026b when value 5 is non-zero
4 => {
Name => 'DistortionParam04',
ValueConv => '$val / 32768',
ValueConvInv => '$val * 32768',
},
5 => {
Name => 'DistortionScale',
ValueConv => '1 / (1 + $val/32768)',
ValueConvInv => '(1/$val - 1) * 32768',
},
# 6 - seen 0x0000-0x027f
7.1 => {
Name => 'DistortionCorrection',
Mask => 0x0f,
# (have seen the upper 4 bits set for GF5 and GX1, giving a value of -4095 - PH)
PrintConv => { 0 => 'Off', 1 => 'On' },
},
8 => {
Name => 'DistortionParam08',
ValueConv => '$val / 32768',
ValueConvInv => '$val * 32768',
},
9 => {
Name => 'DistortionParam09',
ValueConv => '$val / 32768',
ValueConvInv => '$val * 32768',
},
# 10 - seen 0xfc,0x0101,0x01f4,0x021d,0x0256
11 => {
Name => 'DistortionParam11',
ValueConv => '$val / 32768',
ValueConvInv => '$val * 32768',
},
12 => {
Name => 'DistortionN',
Unknown => 1,
},
# 13 - seen 0x0000,0x01f9-0x02b2
# 14,15 - checksums
);
# Panasonic RW2 camera IFD written by GH5 (ref PH)
# (doesn't seem to be valid for the GF7 or GM5 -- encrypted?)
%Image::ExifTool::PanasonicRaw::CameraIFD = (
GROUPS => { 0 => 'PanasonicRaw', 1 => 'CameraIFD', 2 => 'Camera'},
# (don't know what format codes 0x101 and 0x102 are for, so just
# map them into 4 = int32u for now)
VARS => { MAP_FORMAT => { 0x101 => 4, 0x102 => 4 } },
0x1001 => { #forum9388
Name => 'MultishotOn',
Writable => 'int32u',
PrintConv => { 0 => 'No', 1 => 'Yes' },
},
0x1100 => { #forum9274
Name => 'FocusStepNear',
Writable => 'int16s',
},
0x1101 => { #forum9274 (was forum8484)
Name => 'FocusStepCount',
Writable => 'int16s',
},
0x1102 => { #forum9417
Name => 'FlashFired',
Writable => 'int32u',
PrintConv => { 0 => 'No', 1 => 'Yes' },
},
# 0x1104 - set when camera shoots on lowest possible Extended-ISO (forum9290)
0x1105 => { #forum9392
Name => 'ZoomPosition',
Notes => 'in the range 0-255 for most cameras',
Writable => 'int32u',
},
0x1200 => { #forum9278
Name => 'LensAttached',
Notes => 'many CameraIFD tags are invalid if there is no lens attached',
Writable => 'int32u',
PrintConv => { 0 => 'No', 1 => 'Yes' },
},
# Note: LensTypeMake and LensTypeModel are combined into a Composite LensType tag
# defined in Olympus.pm which has the same values as Olympus:LensType
0x1201 => { #IB
Name => 'LensTypeMake',
Condition => '$format eq "int16u"',
Writable => 'int16u',
# when format is int16u, these values have been observed:
# 0 - Olympus or unknown lens
# 2 - Leica or Lumix lens
# when format is int32u (S models), these values have been observed (ref IB):
# 256 - Leica lens
# 257 - Lumix lens
# 258 - ? (seen once)
},
0x1202 => { #IB
Name => 'LensTypeModel',
Condition => '$format eq "int16u"',
Writable => 'int16u',
RawConv => q{
return undef unless $val;
require Image::ExifTool::Olympus; # (to load Composite LensID)
return $val;
},
ValueConv => '$_=sprintf("%.4x",$val); s/(..)(..)/$2 $1/; $_',
ValueConvInv => '$val =~ s/(..) (..)/$2$1/; hex($val)',
},
0x1203 => { #4
Name => 'FocalLengthIn35mmFormat',
Writable => 'int16u',
PrintConv => '"$val mm"',
PrintConvInv => '$val=~s/\s*mm$//;$val',
},
# 0x1300 - incident light value? (ref forum11395)
0x1301 => { #forum11395
Name => 'ApertureValue',
Writable => 'int16s',
Priority => 0,
ValueConv => '2 ** ($val / 512)',
ValueConvInv => '$val>0 ? 512*log($val)/log(2) : 0',
PrintConv => 'sprintf("%.1f",$val)',
PrintConvInv => '$val',
},
0x1302 => { #forum11395
Name => 'ShutterSpeedValue',
Writable => 'int16s',
Priority => 0,
ValueConv => 'abs($val/256)<100 ? 2**(-$val/256) : 0',
ValueConvInv => '$val>0 ? -256*log($val)/log(2) : -25600',
PrintConv => 'Image::ExifTool::Exif::PrintExposureTime($val)',
PrintConvInv => 'Image::ExifTool::Exif::ConvertFraction($val)',
},
0x1303 => { #forum11395
Name => 'SensitivityValue',
Writable => 'int16s',
ValueConv => '$val / 256',
ValueConvInv => 'int($val * 256)',
},
0x1305 => { #forum9384
Name => 'HighISOMode',
Writable => 'int16u',
RawConv => '$val || undef',
PrintConv => { 1 => 'On', 2 => 'Off' },
},
# 0x1306 EV for some models like the GX8 (forum11395)
# 0x140b - scaled overall black level? (ref forum9281)
# 0x1411 - scaled black level per channel difference (ref forum9281)
0x1412 => { #forum11397
Name => 'FacesDetected',
Writable => 'int8u',
PrintConv => { 0 => 'No', 1 => 'Yes' },
},
# 0x2000 - WB tungsten=3, daylight=4 (ref forum9467)
# 0x2009 - scaled black level per channel (ref forum9281)
# 0x3000-0x310b - red/blue balances * 1024 (ref forum9467)
# 0x3000 modifiedTungsten-Red (-2?)
# 0x3001 modifiedTungsten-Blue (-2?)
# 0x3002 modifiedDaylight-Red (-2?)
# 0x3003 modifiedDaylight-Blue (-2?)
# 0x3004 modifiedTungsten-Red (-1?)
# 0x3005 modifiedTungsten-Blue (-1?)
# 0x3006 modifiedDaylight-Red (-1?)
# 0x3007 modifiedDaylight-Blue (-1?)
# 0x3100 DefaultTungsten-Red
# 0x3101 DefaultTungsten-Blue
# 0x3102 DefaultDaylight-Red
# 0x3103 DefaultDaylight-Blue
# 0x3104 modifiedTungsten-Red (+1?)
# 0x3105 modifiedTungsten-Blue (+1?)
# 0x3106 modifiedDaylight-Red (+1?)
# 0x3107 modifiedDaylight-Blue (+1?)
# 0x3108 modifiedTungsten-Red (+2?)
# 0x3109 modifiedTungsten-Blue (+2?)
# 0x310a modifiedDaylight-Red (+2?)
# 0x310b modifiedDaylight-Blue (+2?)
0x3200 => { #forum9275
Name => 'WB_CFA0_LevelDaylight',
Writable => 'int16u',
},
0x3201 => { #forum9275
Name => 'WB_CFA1_LevelDaylight',
Writable => 'int16u',
},
0x3202 => { #forum9275
Name => 'WB_CFA2_LevelDaylight',
Writable => 'int16u',
},
0x3203 => { #forum9275
Name => 'WB_CFA3_LevelDaylight',
Writable => 'int16u',
},
# 0x3204-0x3207 - user multipliers * 1024 ? (ref forum9275)
# 0x320a - scaled maximum value of raw data (scaling = 4x) (ref forum9281)
# 0x3209 - gamma (x256) (ref forum9281)
0x3300 => { #forum9296/9396
Name => 'WhiteBalanceSet',
Writable => 'int8u',
PrintConv => \%panasonicWhiteBalance,
SeparateTable => 'WhiteBalance',
},
0x3420 => { #forum9276
Name => 'WB_RedLevelAuto',
Writable => 'int16u',
},
0x3421 => { #forum9276
Name => 'WB_BlueLevelAuto',
Writable => 'int16u',
},
0x3501 => { #4
Name => 'Orientation',
Writable => 'int8u',
PrintConv => \%Image::ExifTool::Exif::orientation,
},
# 0x3504 = Tag 0x1301+0x1302-0x1303 (Bv = Av+Tv-Sv) (forum11395)
# 0x3505 - same as 0x1300 (forum11395)
0x3600 => { #forum9396
Name => 'WhiteBalanceDetected',
Writable => 'int8u',
PrintConv => \%panasonicWhiteBalance,
SeparateTable => 'WhiteBalance',
},
);
# PanasonicRaw composite tags
%Image::ExifTool::PanasonicRaw::Composite = (
ImageWidth => {
Require => {
0 => 'IFD0:SensorLeftBorder',
1 => 'IFD0:SensorRightBorder',
},
ValueConv => '$val[1] - $val[0]',
},
ImageHeight => {
Require => {
0 => 'IFD0:SensorTopBorder',
1 => 'IFD0:SensorBottomBorder',
},
ValueConv => '$val[1] - $val[0]',
},
);
# add our composite tags
Image::ExifTool::AddCompositeTags('Image::ExifTool::PanasonicRaw');
#------------------------------------------------------------------------------
# checksum algorithm for lens distortion correction information (ref 3)
# Inputs: 0) data ref, 1) start position, 2) number of bytes, 3) increment
# Returns: checksum value
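# Example (illustrative only; this mirrors the first check made in ProcessDistortionInfo below):
#   my $csum = Checksum($dataPt, $start + 4, 12, 1);   # checksum bytes 4..15, step 1
#   # the corresponding stored checksum is Get16u($dataPt, $start + 2) (see WriteDistortionInfo)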
sub Checksum($$$$)
{
my ($dataPt, $start, $num, $inc) = @_;
my $csum = 0;
my $i;
for ($i=0; $i<$num; ++$i) {
$csum = (73 * $csum + Get8u($dataPt, $start + $i * $inc)) % 0xffef;
}
return $csum;
}
#------------------------------------------------------------------------------
# Read lens distortion information
# Inputs: 0) ExifTool ref, 1) dirInfo ref, 2) tag table ref
# Returns: 1 on success
sub ProcessDistortionInfo($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
my $dataPt = $$dirInfo{DataPt};
my $start = $$dirInfo{DirStart} || 0;
my $size = $$dirInfo{DirLen} || (length($$dataPt) - $start);
if ($size == 32) {
# verify the checksums (ref 3)
my $csum1 = Checksum($dataPt, $start + 4, 12, 1);
my $csum2 = Checksum($dataPt, $start + 16, 12, 1);
my $csum3 = Checksum($dataPt, $start + 2, 14, 2);
my $csum4 = Checksum($dataPt, $start + 3, 14, 2);
my $res = $csum1 ^ Get16u($dataPt, $start + 2) ^
$csum2 ^ Get16u($dataPt, $start + 28) ^
$csum3 ^ Get16u($dataPt, $start + 0) ^
$csum4 ^ Get16u($dataPt, $start + 30);
$et->Warn('Invalid DistortionInfo checksum',1) if $res;
} else {
$et->Warn('Invalid DistortionInfo',1);
}
return $et->ProcessBinaryData($dirInfo, $tagTablePtr);
}
#------------------------------------------------------------------------------
# Write lens distortion information
# Inputs: 0) ExifTool ref, 1) dirInfo ref, 2) tag table ref
# Returns: updated distortion information or undef on error
sub WriteDistortionInfo($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
$et or return 1; # (allow dummy access)
my $dat = $et->WriteBinaryData($dirInfo, $tagTablePtr);
if (defined $dat and length($dat) == 32) {
# fix checksums (ref 3)
Set16u(Checksum(\$dat, 4, 12, 1), \$dat, 2);
Set16u(Checksum(\$dat, 16, 12, 1), \$dat, 28);
Set16u(Checksum(\$dat, 2, 14, 2), \$dat, 0);
Set16u(Checksum(\$dat, 3, 14, 2), \$dat, 30);
} else {
$et->Warn('Error writing DistortionInfo',1);
}
return $dat;
}
#------------------------------------------------------------------------------
# Patch for writing non-standard Panasonic RAW/RW2/RWL raw data
# Inputs: 0) offset info ref, 1) raf ref, 2) IFD number
# Returns: error string, or undef on success
# OffsetInfo is a hash by tag ID of lists with the following elements:
# 0 - tag info ref
# 1 - pointer to int32u offset in IFD or value data
# 2 - value count
# 3 - reference to list of original offset values
# 4 - IFD format number
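# e.g. (hypothetical values) $$offsetInfo{0x111} could be
#   [ $tagInfo, 0x2f4, 1, [ 0x1800 ], 4 ]
# ie. a single StripOffsets value of 0x1800, whose int32u entry sits at position 0x2f4,
# with IFD format code 4 (int32u)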
sub PatchRawDataOffset($$$)
{
my ($offsetInfo, $raf, $ifd) = @_;
my $stripOffsets = $$offsetInfo{0x111};
my $stripByteCounts = $$offsetInfo{0x117};
my $rawDataOffset = $$offsetInfo{0x118};
my $err;
$err = 1 unless $ifd == 0;
$err = 1 unless $stripOffsets and $stripByteCounts and $$stripOffsets[2] == 1;
if ($rawDataOffset) {
$err = 1 unless $$rawDataOffset[2] == 1;
$err = 1 unless $$stripOffsets[3][0] == 0xffffffff or $$stripByteCounts[3][0] == 0;
}
$err and return 'Unsupported Panasonic/Leica RAW variant';
if ($rawDataOffset) {
# update StripOffsets along with this tag if it contains a reasonable value
unless ($$stripOffsets[3][0] == 0xffffffff) {
# save pointer to StripOffsets value for updating later
push @$rawDataOffset, $$stripOffsets[1];
}
# handle via RawDataOffset instead of StripOffsets
$stripOffsets = $$offsetInfo{0x111} = $rawDataOffset;
delete $$offsetInfo{0x118};
}
# determine the length of the raw data
my $pos = $raf->Tell();
$raf->Seek(0, 2) or $err = 1; # seek to end of file
my $len = $raf->Tell() - $$stripOffsets[3][0];
$raf->Seek($pos, 0);
# quick check to be sure the raw data length isn't unreasonable
# (the 22-byte length is for '<Dummy raw image data>' in our tests)
$err = 1 if ($len < 1000 and $len != 22) or $len & 0x80000000;
$err and return 'Error reading Panasonic raw data';
# update StripByteCounts info with raw data length
# (note that the original value is maintained in the file)
$$stripByteCounts[3][0] = $len;
return undef;
}
#------------------------------------------------------------------------------
# Write meta information to Panasonic JpgFromRaw in RAW/RW2/RWL image
# Inputs: 0) ExifTool object ref, 1) dirInfo ref, 2) tag table ref
# Returns: updated image data, or undef if nothing changed
sub WriteJpgFromRaw($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
my $dataPt = $$dirInfo{DataPt};
my $byteOrder = GetByteOrder();
my $fileType = $$et{TIFF_TYPE}; # RAW, RW2 or RWL
my $dirStart = $$dirInfo{DirStart};
if ($dirStart) { # DirStart is non-zero in DNG-converted RW2/RWL
my $dirLen = $$dirInfo{DirLen} || length($$dataPt) - $dirStart;
my $buff = substr($$dataPt, $dirStart, $dirLen);
$dataPt = \$buff;
}
my $raf = new File::RandomAccess($dataPt);
my $outbuff;
my %dirInfo = (
RAF => $raf,
OutFile => \$outbuff,
);
$$et{BASE} = $$dirInfo{DataPos};
$$et{FILE_TYPE} = $$et{TIFF_TYPE} = 'JPEG';
# use a specialized map so we don't write XMP or IPTC (or other junk) into the JPEG
my $editDirs = $$et{EDIT_DIRS};
my $addDirs = $$et{ADD_DIRS};
$et->InitWriteDirs(\%jpgFromRawMap);
# don't add XMP segment (IPTC won't get added because it is in Photoshop record)
delete $$et{ADD_DIRS}{XMP};
my $result = $et->WriteJPEG(\%dirInfo);
# restore variables we changed
$$et{BASE} = 0;
$$et{FILE_TYPE} = 'TIFF';
$$et{TIFF_TYPE} = $fileType;
$$et{EDIT_DIRS} = $editDirs;
$$et{ADD_DIRS} = $addDirs;
SetByteOrder($byteOrder);
return $result > 0 ? $outbuff : $$dataPt;
}
#------------------------------------------------------------------------------
# Extract meta information from a Panasonic JpgFromRaw
# Inputs: 0) ExifTool object reference, 1) dirInfo reference, 2) tag table ref
# Returns: 1 on success, 0 if this wasn't a valid JpgFromRaw image
sub ProcessJpgFromRaw($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
my $dataPt = $$dirInfo{DataPt};
my $byteOrder = GetByteOrder();
my $fileType = $$et{TIFF_TYPE}; # RAW, RW2 or RWL
my $tagInfo = $$dirInfo{TagInfo};
my $verbose = $et->Options('Verbose');
my ($indent, $out);
$tagInfo or $et->Warn('No tag info for Panasonic JpgFromRaw'), return 0;
my $dirStart = $$dirInfo{DirStart};
if ($dirStart) { # DirStart is non-zero in DNG-converted RW2/RWL
my $dirLen = $$dirInfo{DirLen} || length($$dataPt) - $dirStart;
my $buff = substr($$dataPt, $dirStart, $dirLen);
$dataPt = \$buff;
}
$$et{BASE} = $$dirInfo{DataPos} + ($dirStart || 0);
$$et{FILE_TYPE} = $$et{TIFF_TYPE} = 'JPEG';
$$et{DOC_NUM} = 1;
# extract information from embedded JPEG
my %dirInfo = (
Parent => 'RAF',
RAF => new File::RandomAccess($dataPt),
);
if ($verbose) {
$indent = $$et{INDENT};
$$et{INDENT} = ' ';
$out = $et->Options('TextOut');
print $out '--- DOC1:JpgFromRaw ',('-'x56),"\n";
}
# fudge HtmlDump base offsets to show as a stand-alone JPEG
$$et{BASE_FUDGE} = $$et{BASE};
my $rtnVal = $et->ProcessJPEG(\%dirInfo);
$$et{BASE_FUDGE} = 0;
# restore necessary variables for continued RW2/RWL processing
$$et{BASE} = 0;
$$et{FILE_TYPE} = 'TIFF';
$$et{TIFF_TYPE} = $fileType;
delete $$et{DOC_NUM};
SetByteOrder($byteOrder);
if ($verbose) {
$$et{INDENT} = $indent;
print $out ('-'x76),"\n";
}
return $rtnVal;
}
1; # end
__END__
=head1 NAME
Image::ExifTool::PanasonicRaw - Read/write Panasonic/Leica RAW/RW2/RWL meta information
=head1 SYNOPSIS
This module is loaded automatically by Image::ExifTool when required.
=head1 DESCRIPTION
This module contains definitions required by Image::ExifTool to read and
write meta information in Panasonic/Leica RAW, RW2 and RWL images.
=head1 AUTHOR
Copyright 2003-2021, Phil Harvey (philharvey66 at gmail.com)
This library is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
=head1 REFERENCES
=over 4
=item L<http://www.cybercom.net/~dcoffin/dcraw/>
=back
=head1 SEE ALSO
L<Image::ExifTool::TagNames/PanasonicRaw Tags>,
L<Image::ExifTool(3pm)|Image::ExifTool>
=cut
| 35.750549 | 102 | 0.543848 |
ed83ff6e609c81308a6df7dd8f3d1417152b89a8 | 8,509 | t | Perl | S32-io/lock.t | SirBogman/roast | 0a0835a20951c93fea57a39dec1b2b8789d81fc5 | [
"Artistic-2.0"
] | 1 | 2019-11-06T05:07:10.000Z | 2019-11-06T05:07:10.000Z | S32-io/lock.t | SirBogman/roast | 0a0835a20951c93fea57a39dec1b2b8789d81fc5 | [
"Artistic-2.0"
] | null | null | null | S32-io/lock.t | SirBogman/roast | 0a0835a20951c93fea57a39dec1b2b8789d81fc5 | [
"Artistic-2.0"
] | null | null | null | use v6;
use Test;
use lib $?FILE.IO.parent(2).add("packages/Test-Helpers");
use Test::Util;
# Tests for IO::Handle.lock/.unlock methods
my $SLEEP = 1 * (%*ENV<ROAST_TIMING_SCALE> || 1);
plan 29;
#?DOES 1
sub test-lock (
Capture :$args1, Str :$args2 = '', :$fails-to-lock,
:$open-for-write, :$blocks-write, :$fails-write,
:$open-for-read, :$blocks-read, :$fails-read,
:$fh is copy, :$file is copy, :$no-close,
){
$file = make-temp-file :content<test> unless $file;
$fh = $file.IO.open(:r) if not $fh and $open-for-read;
$fh = $file.IO.open(:w) if not $fh and $open-for-write;
$fh.DEFINITE
or die 'Provide :open-for-read, :open-for-write, or :fh to test';
LEAVE $fh.close unless $no-close;
subtest "$args1.perl(), $args2.perl()" => sub {
quietly plan $fails-to-lock
?? 1
!! (!$blocks-read and !$fails-read) + $blocks-write + $fails-write
+ $blocks-read + $fails-read + 1;
if $fails-to-lock {
# XXX TODO: is it meant to be this way for Windows?
$*DISTRO.is-win
?? skip
'locking filehandle in wrong mode does not throw on Windows'
!! fails-like { $fh.lock: |$args1 }, X::IO::Lock,
'fails to lock';
return;
}
$fh.lock: |$args1;
if $blocks-write {
is_run qq|
start try \{
my \$fh = '$file'.IO.open(:w);
say "LOCKING";
\$fh.lock($args2); say "FAILED";
}
sleep $SLEEP; say "DONE"
|, {
:err(''), :out{
not .contains: 'FAILED'
and .contains: 'LOCKING'
and .contains: 'DONE'
}
}, 'we got blocked for writing';
}
if $fails-write {
is_run qq|
my \$fh = '$file'.IO.open(:w);
say "LOCKING";
my \$res = \$fh.lock($args2);
if \$res ~~ Failure and \$res.exception ~~ X::IO::Lock
\{ say "DONE"; exit }
say "FAILED";
|, {
:err(''), :out{
not .contains: 'FAILED'
and .contains: 'LOCKING'
and .contains: 'DONE'
}
}, 'we received Failure when locking for writing';
}
unless $blocks-read or $fails-read {
is_run qq|
my \$fh = '$file'.IO.open(:r);
say "LOCKING";
\$fh.lock(:shared, :non-blocking);
say "SUCCESS";
|, {
:err(''), :out{.contains: 'LOCKING' and .contains: 'SUCCESS' }
}, 'we can still lock as shared';
}
if $blocks-read {
is_run qq|
start try \{
my \$fh = '$file'.IO.open(:r);
say "LOCKING";
\$fh.lock($args2); say "FAILED";
}
sleep $SLEEP; say "DONE"
|, {
:err(''), :out{
not .contains: 'FAILED'
and .contains: 'LOCKING'
and .contains: 'DONE'
}
}, 'we got blocked for reading';
}
if $fails-read {
is_run qq|
my \$fh = '$file'.IO.open(:r);
say "LOCKING";
my \$res = \$fh.lock($args2);
if \$res ~~ Failure and \$res.exception ~~ X::IO::Lock
\{ say "DONE"; exit }
say "FAILED";
|, {
:err(''), :out{
not .contains: 'FAILED'
and .contains: 'LOCKING'
and .contains: 'DONE'
}
}, 'we received Failure when locking for reading';
}
$fh.unlock;
with $file.open(:w) {
ok .lock, '.unlock removes lock';
.unlock;
}
}
}
test-lock :open-for-read, :fails-to-lock, args1 => \();
test-lock :open-for-read, :fails-to-lock, args1 => \(:non-blocking);
test-lock :open-for-read, :fails-to-lock, args1 => \(:!non-blocking);
test-lock :open-for-read, :fails-to-lock, args1 => \(:!shared);
test-lock :open-for-read, :fails-to-lock, args1 => \(:!shared, :non-blocking);
test-lock :open-for-read, :fails-to-lock, args1 => \(:!shared, :!non-blocking);
#?rakudo.jvm 6 skip '[io grant] Could not obtain blocking, shared lock: NonWritableChannelException'
test-lock :open-for-read, :blocks-write, args1 => \(:shared);
test-lock :open-for-read, :blocks-write, args1 => \(:shared, :non-blocking);
test-lock :open-for-read, :blocks-write, args1 => \(:shared, :!non-blocking);
test-lock :open-for-read, :fails-write, args2 => ':non-blocking',
args1 => \(:shared);
test-lock :open-for-read, :fails-write, args2 => ':non-blocking',
args1 => \(:shared, :non-blocking);
test-lock :open-for-read, :fails-write, args2 => ':non-blocking',
args1 => \(:shared, :!non-blocking);
#?rakudo.jvm 3 todo '[io grant] expected Failure, not Bool'
test-lock :open-for-write, :fails-to-lock, args1 => \(:shared);
test-lock :open-for-write, :fails-to-lock, args1 => \(:shared, :non-blocking);
test-lock :open-for-write, :fails-to-lock, args1 => \(:shared, :!non-blocking);
test-lock :open-for-write, :blocks-write, :blocks-read,
args1 => \();
test-lock :open-for-write, :blocks-write, :blocks-read,
args1 => \(:non-blocking);
test-lock :open-for-write, :blocks-write, :blocks-read,
args1 => \(:!non-blocking);
test-lock :open-for-write, :blocks-write, :blocks-read,
args1 => \(:!shared);
test-lock :open-for-write, :blocks-write, :blocks-read,
args1 => \(:!shared, :non-blocking);
test-lock :open-for-write, :blocks-write, :blocks-read,
args1 => \(:!shared, :!non-blocking);
#?rakudo.jvm 5 skip '[io grant] hangs'
test-lock :open-for-write, :fails-write, :fails-read,
args2 => ':non-blocking', args1 => \();
test-lock :open-for-write, :fails-write, :fails-read,
args2 => ':non-blocking', args1 => \(:non-blocking);
test-lock :open-for-write, :fails-write, :fails-read,
args2 => ':non-blocking', args1 => \(:!non-blocking);
test-lock :open-for-write, :fails-write, :fails-read,
args2 => ':non-blocking', args1 => \(:!shared, :non-blocking);
test-lock :open-for-write, :fails-write, :fails-read,
args2 => ':non-blocking', args1 => \(:!shared, :!non-blocking);
#?rakudo.jvm skip '[io grant] Could not obtain blocking, shared lock: NonWritableChannelException'
{
my $file = make-temp-file :content<test>;
my $fh = $file.open: :r; LEAVE $fh.close;
$fh.lock: :shared, :non-blocking;
start { sleep $SLEEP; $fh.unlock }
is_run qq|my \$fh = '$file'.IO.open: :w; \$fh.lock; print "DONE"|, {
:err(''), :out<DONE>
}, 'we get the write lock after shared lock is unlocked';
}
#?rakudo.jvm skip '[io grant] Could not obtain blocking, shared lock: NonWritableChannelException'
{
my $file = make-temp-file :content<test>;
my $fh = $file.open: :w; LEAVE $fh.close;
$fh.lock: :non-blocking;
start { sleep $SLEEP; $fh.unlock }
is_run qq|my \$fh = '$file'.IO.open: :r; \$fh.lock: :shared; print "DONE"|, {
:err(''), :out<DONE>
}, 'we get the shared lock after exclusive lock is unlocked';
}
subtest 'IO::CatHandle' => {
plan 13;
is-deeply IO::CatHandle.new.lock, Nil, '.lock on zero-handle cat handle';
is-deeply IO::CatHandle.new.unlock, Nil,
'.unlock on zero-handle cat handle';
my $cat = IO::CatHandle.new:
make-temp-file(:content<foo>).absolute,
make-temp-file(:content<bar>),
make-temp-file(:content<ber>).open;
for ^3 {
#?rakudo.jvm 3 skip '[io grant] Could not obtain blocking, shared lock: NonWritableChannelException'
test-lock :fh($cat), :file($cat.path), :fails-to-lock, :no-close,
args1 => \();
test-lock :fh($cat), :file($cat.path), :blocks-write, :no-close,
args1 => \(:shared);
test-lock :fh($cat), :file($cat.path), :no-close,
args2 => ':non-blocking',
args1 => \(:shared, :non-blocking);
$cat.next-handle;
}
is-deeply $cat.lock, Nil, '.lock on exhausted cat handle';
is-deeply $cat.unlock, Nil, '.unlock on exhausted cat handle';
}
# vim: expandtab shiftwidth=4 ft=perl6
| 36.676724 | 106 | 0.523211 |
ed94153210956378b041030cf4ce62359f6b50f7 | 1,074 | pm | Perl | lib/Google/Ads/GoogleAds/V7/Services/ReachPlanService/ListPlannableProductsRequest.pm | googleads/google-ads-perl | 3ee6c09e11330fea1e6a0c9ee9f837e5e36d8177 | [
"Apache-2.0"
] | 19 | 2019-06-21T00:43:57.000Z | 2022-03-29T14:23:01.000Z | lib/Google/Ads/GoogleAds/V7/Services/ReachPlanService/ListPlannableProductsRequest.pm | googleads/google-ads-perl | 3ee6c09e11330fea1e6a0c9ee9f837e5e36d8177 | [
"Apache-2.0"
] | 16 | 2020-03-04T07:44:53.000Z | 2021-12-15T23:06:23.000Z | lib/Google/Ads/GoogleAds/V7/Services/ReachPlanService/ListPlannableProductsRequest.pm | googleads/google-ads-perl | 3ee6c09e11330fea1e6a0c9ee9f837e5e36d8177 | [
"Apache-2.0"
] | 9 | 2020-02-28T03:00:48.000Z | 2021-11-10T14:23:02.000Z | # Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V7::Services::ReachPlanService::ListPlannableProductsRequest;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);
use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;
sub new {
my ($class, $args) = @_;
my $self = {plannableLocationId => $args->{plannableLocationId}};
# Delete the unassigned fields in this object for a more concise JSON payload
remove_unassigned_fields($self, $args);
bless $self, $class;
return $self;
}
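# Example usage (the location ID value below is illustrative only):
#   my $request =
#     Google::Ads::GoogleAds::V7::Services::ReachPlanService::ListPlannableProductsRequest->new({
#       plannableLocationId => "2840"});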
1;
| 30.685714 | 93 | 0.74581 |
ed9be8f88755d7e8d187da4c9f1dbf2f56ed2915 | 385 | t | Perl | t/nqp/47-loop-control.t | rurban/nqp | e6a1ef24d68514b11f7664d465786f638ee9917e | [
"Artistic-2.0"
] | 3 | 2018-08-10T01:59:39.000Z | 2020-05-13T07:00:27.000Z | t/nqp/47-loop-control.t | rurban/nqp | e6a1ef24d68514b11f7664d465786f638ee9917e | [
"Artistic-2.0"
] | null | null | null | t/nqp/47-loop-control.t | rurban/nqp | e6a1ef24d68514b11f7664d465786f638ee9917e | [
"Artistic-2.0"
] | 2 | 2016-08-16T00:44:46.000Z | 2020-05-27T07:32:15.000Z | #! nqp
plan(3);
my $runs := 0;
while $runs < 5 {
$runs++;
last if $runs == 3;
}
ok($runs == 3, "last works in while");
$runs := 0;
my $i := 0;
while $runs < 5 {
$runs++;
next if $runs % 2;
$i++;
}
ok($i == 2, "next works in while");
$runs := 0;
$i := 0;
while $i < 5 {
$runs++;
redo if $runs % 2;
$i++;
}
ok($runs == 10, "redo works in while");
| 11.666667 | 39 | 0.446753 |
edc9217437d2f6e01481ac407616836533cf4585 | 6,228 | pm | Perl | lib/Smartcat/Client/Object/ProjectChangesModel.pm | ta2-1/smartcat-api-perl | 584f1a4c0f61a635bb28700011003bad632e188b | [
"Apache-2.0"
] | null | null | null | lib/Smartcat/Client/Object/ProjectChangesModel.pm | ta2-1/smartcat-api-perl | 584f1a4c0f61a635bb28700011003bad632e188b | [
"Apache-2.0"
] | 1 | 2020-07-20T18:06:22.000Z | 2020-07-20T23:33:45.000Z | lib/Smartcat/Client/Object/ProjectChangesModel.pm | ta2-1/smartcat-api-perl | 584f1a4c0f61a635bb28700011003bad632e188b | [
"Apache-2.0"
] | null | null | null |
=begin comment
Smartcat Integration API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
=end comment
=cut
#
# NOTE: This class is auto generated by the swagger code generator program.
# Do not edit the class manually.
# Ref: https://github.com/swagger-api/swagger-codegen
#
package Smartcat::Client::Object::ProjectChangesModel;
require 5.6.0;
use strict;
use warnings;
use utf8;
use JSON qw(decode_json);
use Data::Dumper;
use Module::Runtime qw(use_module);
use Log::Any qw($log);
use Date::Parse;
use DateTime;
use base ( "Class::Accessor", "Class::Data::Inheritable" );
#
#
#
# NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
# REF: https://github.com/swagger-api/swagger-codegen
#
=begin comment
Smartcat Integration API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
=end comment
=cut
#
# NOTE: This class is auto generated by the swagger code generator program.
# Do not edit the class manually.
# Ref: https://github.com/swagger-api/swagger-codegen
#
__PACKAGE__->mk_classdata( 'attribute_map' => {} );
__PACKAGE__->mk_classdata( 'swagger_types' => {} );
__PACKAGE__->mk_classdata( 'method_documentation' => {} );
__PACKAGE__->mk_classdata( 'class_documentation' => {} );
# new object
sub new {
my ( $class, %args ) = @_;
my $self = bless {}, $class;
foreach my $attribute ( keys %{ $class->attribute_map } ) {
my $args_key = $class->attribute_map->{$attribute};
$self->$attribute( $args{$args_key} );
}
return $self;
}
# return perl hash
sub to_hash {
return decode_json( JSON->new->convert_blessed->encode(shift) );
}
# used by JSON for serialization
sub TO_JSON {
my $self = shift;
my $_data = {};
foreach my $_key ( keys %{ $self->attribute_map } ) {
if ( defined $self->{$_key} ) {
$_data->{ $self->attribute_map->{$_key} } = $self->{$_key};
}
}
return $_data;
}
# from Perl hashref
sub from_hash {
my ( $self, $hash ) = @_;
# loop through attributes and use swagger_types to deserialize the data
while ( my ( $_key, $_type ) = each %{ $self->swagger_types } ) {
my $_json_attribute = $self->attribute_map->{$_key};
if ( $_type =~ /^array\[/i ) { # array
my $_subclass = substr( $_type, 6, -1 );
my @_array = ();
foreach my $_element ( @{ $hash->{$_json_attribute} } ) {
push @_array, $self->_deserialize( $_subclass, $_element );
}
$self->{$_key} = \@_array;
}
elsif ( exists $hash->{$_json_attribute} )
{ #hash(model), primitive, datetime
$self->{$_key} =
$self->_deserialize( $_type, $hash->{$_json_attribute} );
}
else {
$log->debugf( "Warning: %s (%s) does not exist in input hash\n",
$_key, $_json_attribute );
}
}
return $self;
}
# deserialize non-array data
sub _deserialize {
my ( $self, $type, $data ) = @_;
$log->debugf( "deserializing %s with %s", Dumper($data), $type );
if ( $type eq 'DateTime' ) {
return DateTime->from_epoch( epoch => str2time($data) );
}
elsif ( grep( /^$type$/, ( 'int', 'double', 'string', 'boolean' ) ) ) {
return $data;
}
else { # hash(model)
my $_instance = eval "Smartcat::Client::Object::$type->new()";
return $_instance->from_hash($data);
}
}
__PACKAGE__->class_documentation(
{
description => '',
class => 'ProjectChangesModel',
required => [], # TODO
}
);
__PACKAGE__->method_documentation(
{
'name' => {
datatype => 'string',
base_name => 'name',
description => '',
format => '',
read_only => '',
},
'description' => {
datatype => 'string',
base_name => 'description',
description => '',
format => '',
read_only => '',
},
'deadline' => {
datatype => 'DateTime',
base_name => 'deadline',
description => '',
format => '',
read_only => '',
},
'client_id' => {
datatype => 'string',
base_name => 'clientId',
description => '',
format => '',
read_only => '',
},
'domain_id' => {
datatype => 'int',
base_name => 'domainId',
description => '',
format => '',
read_only => '',
},
'vendor_account_id' => {
datatype => 'string',
base_name => 'vendorAccountId',
description => '',
format => '',
read_only => '',
},
'external_tag' => {
datatype => 'string',
base_name => 'externalTag',
description => '',
format => '',
read_only => '',
},
}
);
__PACKAGE__->swagger_types(
{
'name' => 'string',
'description' => 'string',
'deadline' => 'DateTime',
'client_id' => 'string',
'domain_id' => 'int',
'vendor_account_id' => 'string',
'external_tag' => 'string'
}
);
__PACKAGE__->attribute_map(
{
'name' => 'name',
'description' => 'description',
'deadline' => 'deadline',
'client_id' => 'clientId',
'domain_id' => 'domainId',
'vendor_account_id' => 'vendorAccountId',
'external_tag' => 'externalTag'
}
);
__PACKAGE__->mk_accessors( keys %{ __PACKAGE__->attribute_map } );
1;
| 26.729614 | 107 | 0.521355 |
ed5413a4abeff0b3d09d7017d13fe9576a26075a | 3,537 | pm | Perl | auto-lib/Paws/ELBv2/TargetHealth.pm | meis/aws-sdk-perl | 6d61ffcf351e446f06d7e84e53caa08d98573959 | [
"Apache-2.0"
] | null | null | null | auto-lib/Paws/ELBv2/TargetHealth.pm | meis/aws-sdk-perl | 6d61ffcf351e446f06d7e84e53caa08d98573959 | [
"Apache-2.0"
] | 1 | 2021-05-26T19:13:58.000Z | 2021-05-26T19:13:58.000Z | auto-lib/Paws/ELBv2/TargetHealth.pm | meis/aws-sdk-perl | 6d61ffcf351e446f06d7e84e53caa08d98573959 | [
"Apache-2.0"
] | null | null | null | package Paws::ELBv2::TargetHealth;
use Moose;
has Description => (is => 'ro', isa => 'Str');
has Reason => (is => 'ro', isa => 'Str');
has State => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::ELBv2::TargetHealth
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::ELBv2::TargetHealth object:
$service_obj->Method(Att1 => { Description => $value, ..., State => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::ELBv2::TargetHealth object:
$result = $service_obj->Method(...);
$result->Att1->Description
=head1 DESCRIPTION
Information about the current health of a target.
=head1 ATTRIBUTES
=head2 Description => Str
A description of the target health that provides additional details. If
the state is C<healthy>, a description is not provided.
=head2 Reason => Str
The reason code.
If the target state is C<healthy>, a reason code is not provided.
If the target state is C<initial>, the reason code can be one of the
following values:
=over
=item *
C<Elb.RegistrationInProgress> - The target is in the process of being
registered with the load balancer.
=item *
C<Elb.InitialHealthChecking> - The load balancer is still sending the
target the minimum number of health checks required to determine its
health status.
=back
If the target state is C<unhealthy>, the reason code can be one of the
following values:
=over
=item *
C<Target.ResponseCodeMismatch> - The health checks did not return an
expected HTTP code.
=item *
C<Target.Timeout> - The health check requests timed out.
=item *
C<Target.FailedHealthChecks> - The health checks failed because the
connection to the target timed out, the target response was malformed,
or the target failed the health check for an unknown reason.
=item *
C<Elb.InternalError> - The health checks failed due to an internal
error.
=back
If the target state is C<unused>, the reason code can be one of the
following values:
=over
=item *
C<Target.NotRegistered> - The target is not registered with the target
group.
=item *
C<Target.NotInUse> - The target group is not used by any load balancer
or the target is in an Availability Zone that is not enabled for its
load balancer.
=item *
C<Target.IpUnusable> - The target IP address is reserved for use by a
load balancer.
=item *
C<Target.InvalidState> - The target is in the stopped or terminated
state.
=back
If the target state is C<draining>, the reason code can be the
following value:
=over
=item *
C<Target.DeregistrationInProgress> - The target is in the process of
being deregistered and the deregistration delay period has not expired.
=back
If the target state is C<unavailable>, the reason code can be the
following value:
=over
=item *
C<Target.HealthCheckDisabled> - Health checks are disabled for the
target group.
=back
=head2 State => Str
The state of the target.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::ELBv2>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 20.805882 | 102 | 0.739044 |
ed888104b0ca02eed212d879a1a4a2ddd3d246db | 5,640 | pm | Perl | lib/Catmandu/Fix/Bind.pm | gitpan/Catmandu | 014bb49639534756856f8db46a999d879269122f | [
"Artistic-1.0"
] | null | null | null | lib/Catmandu/Fix/Bind.pm | gitpan/Catmandu | 014bb49639534756856f8db46a999d879269122f | [
"Artistic-1.0"
] | null | null | null | lib/Catmandu/Fix/Bind.pm | gitpan/Catmandu | 014bb49639534756856f8db46a999d879269122f | [
"Artistic-1.0"
] | null | null | null | package Catmandu::Fix::Bind;
use Moo::Role;
use namespace::clean;
requires 'unit';
requires 'bind';
has return => (is => 'rw', default => sub { [0]});
has fixes => (is => 'rw', default => sub { [] });
around bind => sub {
my ($orig, $self, $prev, @args) = @_;
my $next = $orig->($self,$prev,@args);
if ($self->can('plus')) {
return $self->plus($prev,$next);
}
else {
return $next;
}
};
sub unit {
my ($self,$data) = @_;
return $data;
}
sub bind {
my ($self,$data,$code,$name,$perl) = @_;
return $code->($data);
}
sub emit {
my ($self, $fixer, $label) = @_;
my $code = [ map { [ref($_) , $fixer->emit_fix($_)] } @{$self->fixes} ];
my $perl = $self->emit_bind($fixer,$code);
$perl;
}
sub emit_bind {
my ($self,$fixer,$code) = @_;
my $var = $fixer->var;
my $perl = "";
my $bind_var = $fixer->capture($self);
my $unit = $fixer->generate_var;
$perl .= "my ${unit} = ${bind_var}->unit(${var});";
for my $pair (@$code) {
my $name = $pair->[0];
my $code = $pair->[1];
$perl .= "${unit} = ${bind_var}->bind(${unit}, sub {";
$perl .= "my ${var} = shift;";
$perl .= $code;
$perl .= "${var}";
$perl .= "},'$name');"
}
if ($self->can('result')) {
$perl .= "${unit} = ${bind_var}->result(${unit});";
}
if ($self->return) {
$perl .= "${var} = ${unit};";
}
$perl;
}
=head1 NAME
Catmandu::Fix::Bind - a wrapper for Catmandu::Fix-es
=head1 SYNOPSIS
package Catmandu::Fix::Bind::demo;
use Moo;
with 'Catmandu::Fix::Bind';
sub bind {
my ($self,$data,$code,$name) = @_;
warn "executing $name";
$code->($data);
}
# in your fix script you can now write
do
demo()
fix1()
fix2()
fix3()
end
# this will execute all the fixes as expected, and print to STDERR the following messages
executing fix1
executing fix2
executing fix3
=head1 DESCRIPTION
Bind is a package that wraps Catmandu::Fix-es and other Catmandu::Fix::Bind-s together. This gives
the programmer further control over the execution of fixes. With Catmandu::Fix::Bind you can simulate
the 'before', 'after' and 'around' modifiers as found in Moo or Dancer.
To wrap Fix functions, the Fix language has a 'do' statement:
do BIND
FIX1
FIX2
FIX3
end
where BIND is a implementation of Catmandu::Fix::Bind and FIX1,...,FIXn are Catmandu::Fix functions.
In the example above the BIND will wrap FIX1, FIX2 and FIX3. BIND will first wrap the record data
using its 'unit' method and send the data sequentially to each FIX which can make inline changes
to the record data. In pseudo-code this will look like:
$bind_data = $bind->unit($data);
$bind_data = $bind->bind($bind_data, $fix1);
$bind_data = $bind->bind($bind_data, $fix2);
$bind_data = $bind->bind($bind_data, $fix3);
return $data;
An alternative form exists, 'doset', which will overwrite the record data with the results of the
last fix.
doset BIND
FIX1
FIX2
FIX3
end
This will result in pseudo-code like:
$bind_data = $bind->unit($data);
$bind_data = $bind->bind($bind_data, $fix1);
$bind_data = $bind->bind($bind_data, $fix2);
$bind_data = $bind->bind($bind_data, $fix3);
return $bind_data;
A Catmandu::Fix::Bind needs to implement two methods: 'unit' and 'bind'.
=head1 METHODS
=head2 unit($data)
The unit method receives a Perl $data HASH and should return it, possibly converted to a new type.
The 'unit' method is called before all Fix methods are executed. A trivial, but verbose, implementation
of 'unit' is:
sub unit {
my ($self,$data) = @_;
my $wrapped_data = $data;
return $wrapped_data;
}
=head2 bind($wrapped_data,$code,$name,$perl)
The bind method is executed for every Catmandu::Fix method in the fix script. It receives the $wrapped_data
(wrapped by 'unit'), the fix method as anonymous subroutine and the name of the fix. It should return data
with the same type as returned by 'unit'.
A trivial, but verbose, implementaion of 'bind' is:
sub bind {
my ($self,$wrapped_data,$code,$name,$perl) = @_;
my $data = $wrapped_data;
$data = $code->($data);
# we don't need to wrap it again because the $data and $wrapped_data have the same type
$data;
}
=head1 REQUIREMENTS
Bind modules are simplified implementations of Monads. They should satisfy the formal definition of Monads, codified
in 3 monadic laws:
=head2 left unit: unit acts as a neutral element of bind
my $monad = Catmandu::Fix::Bind->demo();
# bind(unit(data), coderef) == unit(coderef(data))
$monad->bind( $monad->unit({foo=>'bar'}) , $coderef) == $monad->unit($coderef->({foo=>'bar'}));
=head2 right unit: unit acts as a neutral element of bind
# bind(unit(data), unit) == unit(data)
$monad->bind( $monad->unit({foo=>'bar'}) , sub { $monad->unit(shift) } ) == $monad->unit({foo=>'bar'});
=head2 associative: chaining bind blocks should have the same effect as nesting them
# bind(bind(unit(data),f),g) == bind(unit(data), sub { return bind(unit(f(data)),g) } )
my $f = sub { my $data = shift; $data->{demo} = 1 ; $data };
my $g = sub { my $data = shift; $data->{demo} += 1 ; $data};
$monad->bind( $monad->bind( $monad->unit({}) , f ) , g ) ==
$monad->bind( $monad->unit({}) , sub { my $data = shift; $monad->bind($monad->unit($f->($data)), $g ); $data; });
=head1 SEE ALSO
L<Catmandu::Fix::Bind::identity>, L<Catmandu::Fix::Bind::benchmark>
=head1 AUTHOR
Patrick Hochstenbach - L<[email protected]>
=cut
1;
| 25.87156 | 118 | 0.613298 |
eddececb656bb4c02551279044346b1b76898e3d | 668 | pl | Perl | resources/playground.pl | Winis04/prolog-analyzer | c37444047c8424ebbd7f5cc5b0011857059eae3a | [
"MIT"
] | 5 | 2018-12-20T10:32:34.000Z | 2020-08-18T14:59:24.000Z | resources/playground.pl | Winis04/prolog-analyzer | c37444047c8424ebbd7f5cc5b0011857059eae3a | [
"MIT"
] | null | null | null | resources/playground.pl | Winis04/prolog-analyzer | c37444047c8424ebbd7f5cc5b0011857059eae3a | [
"MIT"
] | 1 | 2021-06-21T19:32:04.000Z | 2021-06-21T19:32:04.000Z | :- module(playground, []).
unary_kernel_call(Module,KernelFunction,ESV1,Value,WF,Expr,Type,Span) :-
(expects_waitflag_and_span(KernelFunction)
-> KernelCall =.. [KernelFunction,ESV1,Value,Span,WF]
; expects_waitflag(KernelFunction) -> KernelCall =.. [KernelFunction,ESV1,Value,WF]
; KernelCall =.. [KernelFunction,ESV1,Value]
),
%print(call(Type,KernelCall)),nl,
(type_ok_for_wf0(Type) -> %print(direct_call(Type,KernelCall)),nl,
Module:KernelCall
; reversible_unary_function(KernelFunction)
-> kernel_call_or(Module:KernelCall,ESV1,Value,WF,Expr)
; must_succ_kernel_call(Module:KernelCall,ESV1,WF,Expr)
).
| 41.75 | 89 | 0.711078 |
edc0732261fc4af6d53be4e03cd452d9277b9a29 | 5,845 | pm | Perl | modules/hp/HP/ACU.pm | gonter/aix-pm | 0fc112284fbc1d404c9e00574f2d10ea247d349d | [
"BSD-2-Clause"
] | null | null | null | modules/hp/HP/ACU.pm | gonter/aix-pm | 0fc112284fbc1d404c9e00574f2d10ea247d349d | [
"BSD-2-Clause"
] | null | null | null | modules/hp/HP/ACU.pm | gonter/aix-pm | 0fc112284fbc1d404c9e00574f2d10ea247d349d | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/perl
# $Id: ACU.pm,v 1.1 2010/10/21 11:02:16 gonter Exp $
use strict;
use Data::Dumper;
use HP::ACU::array;
package HP::ACU;
my $hpacucli= '/usr/sbin/hpacucli';
my %ignore= map { $_ => 1 } (
q[FIRMWARE UPGRADE REQUIRED: A firmware update is recommended for this controller],
q[ to prevent rare potential data write errors on a],
q[ RAID 1 or RAID 1+0 volume in a scenario of],
q[ concurrent background surface analysis and I/O write],
q[ operations. Please refer to Customer Advisory],
q[ c01587778 which can be found at hp.com.],
q[Warning: Deleting an array can cause other array letters to become renamed.],
q[ E.g. Deleting array A from arrays A,B,C will result in two remaining],
q[ arrays A,B ... not B,C],
);
# print "ignore: ", Dumper (\%ignore), "\n";
sub new
{
my $class= shift;
my %par= @_;
my $obj= { 'ctrl_slot' => 0 };
bless $obj, $class;
$obj->reset ();
foreach my $par (keys %par)
{
if ($par eq 'pd_watch')
{ # list of physical drives to watch
my $v= $par{$par};
my $p= $obj->{'pd_watch'};
$p= $obj->{'pd_watch'}= {} unless (defined($p));
if (ref($v) eq 'HASH') { map { $obj->watch ($_, $v->{$_}); } keys %$v; }
elsif (ref($v) eq 'ARRAY') { map { $obj->watch ($_); } @$v; }
}
else
{
$obj->{$par}= $par{$par};
}
}
$obj;
}
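# typical construction (slot number and drive ids are illustrative only):
#   my $acu= new HP::ACU ('ctrl_slot' => 0, 'verbose' => 1,
#                         'pd_watch' => [ '1:3', '1:4' ]);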
sub reset
{
my $obj= shift;
map { $obj->{$_}= {}; } qw(array pd_id ld_id);
}
=pod
=head2 $acu->watch (name, [an => av]*);
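Register physical drive C<name> on the watch list; any additional attribute
pairs are stored with that disk, e.g. (attribute name and value are
illustrative only):
  $acu->watch ('1:3', 'note' => 'replace soon');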
=cut
sub watch
{
my $obj= shift;
my $pd_id= shift;
my %par= @_;
my $p= $obj->{'pd_watch'}->{$pd_id};
unless (defined ($p))
{
$p= $obj->{'pd_watch'}->{$pd_id}= {};
}
$p->{'watched'}= 1;
$p->{'pd_id'}= $pd_id;
foreach my $par (keys %par)
{
$p->{$par}= $par{$par};
}
$p;
}
=pod
=head2 $acu->watched ([name]);
If a name is specified, return that watched disk's data;
otherwise return a sorted list of watched disk names.
=cut
sub watched
{
my $obj= shift;
my $name= shift;
my $w= $obj->{'pd_watch'};
unless (defined ($w))
{
$w= $obj->{'pd_watch'}= {};
}
if (defined ($name))
{
my $x= $w->{$name};
unless (defined ($x))
{
$x= $w->{$name}= {};
}
return $x;
}
return sort keys %$w;
}
sub array
{
my $obj= shift;
my $name= shift;
unless (exists ($obj->{'array'}->{$name}))
{
$obj->{'array'}->{$name}= new HP::ACU::array;
}
$obj->{'array'}->{$name};
}
sub ld_create
{
my $obj= shift;
my $drives= shift;
my $ctrl= $obj->{'ctrl_slot'};
$obj->get_cmd ("$hpacucli ctrl slot=$ctrl create type=ld drives=$drives");
}
sub ld_delete
{
my $obj= shift;
my $ld_id= shift;
my $ctrl= $obj->{'ctrl_slot'};
my $cmd= "$hpacucli ctrl slot=$ctrl ld $ld_id delete forced";
$obj->get_cmd ($cmd);
}
sub get_config
{
my $obj= shift;
my $ctrl= $obj->{'ctrl_slot'};
$obj->get_cmd ("$hpacucli ctrl slot=$ctrl pd all show");
$obj->get_cmd ("$hpacucli ctrl slot=$ctrl ld all show");
# $obj->get_cmd ("$hpacucli ctrl slot=$ctrl array all show");
my @arrays= sort keys %{$obj->{'array'}};
my $pd_watch= $obj->{'pd_watch'};
foreach my $array (@arrays)
{
# XXX next if ($array eq 'A'); # system disks!
# $obj->get_cmd ("$hpacucli ctrl slot=$ctrl array all show");
print "array=[$array]\n";
my $ua= $obj->{'array'}->{$array};
my $uap= $ua->{'pd_id'};
my $watched= 0;
foreach my $ua_disk (keys %$uap)
{
next unless (exists ($pd_watch->{$ua_disk}));
$obj->get_cmd ("$hpacucli ctrl slot=$ctrl pd $ua_disk show");
$watched++;
}
if ($watched)
{
my $ldp= $ua->{'ld_id'};
foreach my $ld_id (keys %$ldp)
{
$obj->get_cmd ("$hpacucli ctrl slot=$ctrl ld $ld_id show");
}
}
}
}
sub get_cmd
{
my $obj= shift;
my $cmd= shift;
print ">>> $cmd\n";
open (CMD, $cmd . '|') or die;
my $state= undef;
my $array= undef;
my $array_name= 'unknown';
my $physicaldrive= undef;
my $logicaldrive= undef;
my $show_lines= ($obj->{'verbose'} >= 1) ? 1 : 0;
while (<CMD>)
{
chop;
next if ($_ eq '' || exists ($ignore{$_}));
print "[$_]\n" if ($show_lines);
if ($_ =~ q[Smart Array 6i in Slot (\d+) ])
{
}
elsif ($_ eq q[ unassigned])
{
$state= 'array';
$array= $obj->array ($array_name= 'unassigned');
}
elsif ($_ =~ m[^ array (\S+)])
{
$array_name= $1;
$array= $obj->array ($array_name);
$state= 'array';
}
elsif ($_ =~ m[^ physicaldrive ((\d+):(\d+))])
{
my ($pd_id, $port, $id)= ($1, $2, $3);
$physicaldrive= $array->physicaldrive ($pd_id);
$state= 'physicaldrive';
$obj->{'pd_id'}->{$pd_id}= $array_name;
}
elsif ($_ =~ m[^ logicaldrive (\d+) ])
{ # just a listing of logical drives
my $ld_id= $1;
$logicaldrive= $array->logicaldrive ($ld_id);
$state= undef;
$obj->{'ld_id'}->{$ld_id}= $array_name;
}
elsif ($_ =~ m[^ Logical Drive: (\d+)])
{ # more details about a logical drive
my $ld_id= $1;
$logicaldrive= $array->logicaldrive ($ld_id);
$state= 'logicaldrive';
$obj->{'ld_id'}->{$ld_id}= $array_name;
}
elsif ($_ =~ m[^ (.+):\s+(.+)])
{
my ($an, $av)= ($1, $2);
if ($state eq 'physicaldrive')
{
$physicaldrive->{$an}= $av;
# push (@{$physicaldrive->{'_'}}, $_);
}
elsif ($state eq 'logicaldrive')
{
$logicaldrive->{$an}= $av;
}
else
{
goto UNKNOWN;
}
}
else
{
UNKNOWN:
print __LINE__, " >>> [$_]\n";
}
}
close (CMD);
}
1;
__END__
| 19.948805 | 85 | 0.509666 |
ed91665fc39b48fd2c98e6653aa2535d2a51fe40 | 806 | t | Perl | t/EzsignbulksendCreateObjectV1ResponseMPayloadTest.t | ezmaxinc/eZmax-SDK-perl | 3de20235136371b946247d2aed9e5e5704a4051c | [
"MIT"
] | null | null | null | t/EzsignbulksendCreateObjectV1ResponseMPayloadTest.t | ezmaxinc/eZmax-SDK-perl | 3de20235136371b946247d2aed9e5e5704a4051c | [
"MIT"
] | null | null | null | t/EzsignbulksendCreateObjectV1ResponseMPayloadTest.t | ezmaxinc/eZmax-SDK-perl | 3de20235136371b946247d2aed9e5e5704a4051c | [
"MIT"
] | null | null | null | =begin comment
eZmax API Definition (Full)
This API expose all the functionnalities for the eZmax and eZsign applications.
The version of the OpenAPI document: 1.1.7
Contact: [email protected]
Generated by: https://openapi-generator.tech
=end comment
=cut
#
# NOTE: This class is auto generated by the OpenAPI Generator
# Please update the test cases below to test the model.
# Ref: https://openapi-generator.tech
#
use Test::More tests => 2;
use Test::Exception;
use lib 'lib';
use strict;
use warnings;
use_ok('EzmaxApi::Object::EzsignbulksendCreateObjectV1ResponseMPayload');
# uncomment below and update the test
#my $instance = EzmaxApi::Object::EzsignbulksendCreateObjectV1ResponseMPayload->new();
#
#isa_ok($instance, 'EzmaxApi::Object::EzsignbulksendCreateObjectV1ResponseMPayload');
| 23.028571 | 86 | 0.776675 |
ed5963049a08ec9a03266fef406078ddf2ba59c4 | 7,356 | t | Perl | S03-operators/range-basic.t | jmaslak/roast | d69446499800e7cb274c0c240691a8199e69b22c | [
"Artistic-2.0"
] | 1 | 2019-11-06T05:07:10.000Z | 2019-11-06T05:07:10.000Z | S03-operators/range-basic.t | jmaslak/roast | d69446499800e7cb274c0c240691a8199e69b22c | [
"Artistic-2.0"
] | null | null | null | S03-operators/range-basic.t | jmaslak/roast | d69446499800e7cb274c0c240691a8199e69b22c | [
"Artistic-2.0"
] | null | null | null | use v6;
use Test;
plan 115;
sub test($range,$min,$max,$exmin,$exmax,$inf,$elems,$perl) {
subtest {
plan $elems == Inf ?? 8 !! 10;
isa-ok $range, Range, "$range.gist() is a Range";
is $range.min, $min, "$range.gist().min is $min";
is $range.max, $max, "$range.gist().max is $max";
is $range.excludes-min, $exmin, "$range.gist().excludes-min is $exmin";
is $range.excludes-max, $exmax, "$range.gist().excludes-max is $exmax";
is $range.infinite, $inf, "$range.gist().infinite is $inf";
is $range.perl, $perl, "$range.gist().perl is $perl";
if $elems == Inf {
throws-like $range.elems, X::Cannot::Lazy, :action<.elems>;
}
else {
is $range.elems, $elems, "$range.gist().elems is $elems";
my int $i;
$i = $i + 1 for Seq.new($range.iterator); # simulate for ^10
is $i, $elems, "for $range.gist() runs $elems times";
my @a = Seq.new($range.iterator); # simulate my @a = ^10
is +@a, $elems, "my @ = $range.gist() stores $elems elems";
}
}, "Testing $range.gist()"
}
test 2..6, 2, 6, False, False, False, 5, "2..6";
test -1^..7, -1, 7, True, False, False, 8, "-1^..7";
test 3..^-1, 3, -1, False, True, False, 0, "3..^-1";
test 666..42, 666, 42, False, False, False, 0, "666..42";
test "a".."g", 'a', 'g', False, False, False, 7, '"a".."g"';
test "a"..^"g", 'a', 'g', False, True, False, 6, '"a"..^"g"';
test "a"^.."g", 'a', 'g', True, False, False, 6, '"a"^.."g"';
test "a"^..^"g", 'a', 'g', True, True, False, 5, '"a"^..^"g"';
test "g".."a", 'g', 'a', False, False, False, 0, '"g".."a"';
test '!'..'&', '!', '&', False, False, False, 6, '"!".."\\&"';
test '!'..^'&', '!', '&', False, True, False, 5, '"!"..^"\\&"';
test '!'^..'&', '!', '&', True, False, False, 5, '"!"^.."\\&"';
test '!'^..^'&', '!', '&', True, True, False, 4,'"!"^..^"\\&"';
test '&'..'!', '&', '!', False, False, False, 0, '"\\&".."!"';
test ^5, 0, 5, False, True, False, 5, "^5";
test ^5.5, 0, 5.5, False, True, False, 6, "0..^5.5";
test ^5.5e0, 0, 5.5e0, False, True, False, 6, "0..^5.5e0";
test 1..*, 1, Inf, False, False, True, Inf, "1..Inf";
test 1^..*, 1, Inf, True, False, True, Inf, "1^..Inf";
test 1..^*, 1, Inf, False, True, True, Inf, "1..^Inf";
test 1^..^*, 1, Inf, True, True, True, Inf, "1^..^Inf";
test *..1, -Inf, 1, False, False, True, Inf, "-Inf..1";
test *^..1, -Inf, 1, True, False, True, Inf, "-Inf^..1";
test *..^1, -Inf, 1, False, True, True, Inf, "-Inf..^1";
test *^..^1, -Inf, 1, True, True, True, Inf, "-Inf^..^1";
test *..*, -Inf, Inf, False, False, True, Inf, "-Inf..Inf";
test *^..*, -Inf, Inf, True, False, True, Inf, "-Inf^..Inf";
test *..^*, -Inf, Inf, False, True, True, Inf, "-Inf..^Inf";
test *^..^*, -Inf, Inf, True, True, True, Inf, "-Inf^..^Inf";
# some range constructions are invalid
{
throws-like '10 .. ^20', X::Range::InvalidArg, got => ^20;
throws-like '^10 .. 20', X::Range::InvalidArg, got => ^10;
throws-like '* .. ^20', X::Range::InvalidArg, got => ^20;
throws-like '^10 .. *', X::Range::InvalidArg, got => ^10;
throws-like '* .. 42i', X::Range::InvalidArg, got => 42i;
throws-like '42i .. *', X::Range::InvalidArg, got => 42i;
throws-like '42.map({$_}) .. *', X::Range::InvalidArg, got => Seq;
throws-like '* .. 42.map({$_})', X::Range::InvalidArg, got => Seq;
}
ok 3 ~~ 1..5, '3 ~~ 1..5';
ok 2.5 ~~ 1..5, '2.5 ~~ 1..5';
ok 2.5e0 ~~ 1..5, '2.5e0 ~~ 1..5';
ok 1 ~~ 1..5, '1 ~~ 1..5';
ok 1.0 ~~ 1..5, '1.0 ~~ 1..5';
ok 1.0e0 ~~ 1..5, '1.0e0 ~~ 1..5';
ok 5 ~~ 1..5, '5 ~~ 1..5';
ok 5.0 ~~ 1..5, '5.0 ~~ 1..5';
ok 5.0e0 ~~ 1..5, '5.0e0 ~~ 1..5';
nok 0 ~~ 1..5, 'not 0 ~~ 1..5';
nok 0.999 ~~ 1..5, 'not 0.999 ~~ 1..5';
nok 0.999e0 ~~ 1..5, 'not 0.999e0 ~~ 1..5';
nok 6 ~~ 1..5, 'not 6 ~~ 1..5';
nok 5.001 ~~ 1..5, 'not 5.001 ~~ 1..5';
nok 5.001e0 ~~ 1..5, 'not 5.001e0 ~~ 1..5';
ok 3 ~~ 1^..5, '3 ~~ 1^..5';
ok 2.5 ~~ 1^..5, '2.5 ~~ 1^..5';
ok 2.5e0 ~~ 1^..5, '2.5e0 ~~ 1^..5';
nok 1 ~~ 1^..5, 'not 1 ~~ 1^..5';
nok 1.0 ~~ 1^..5, 'not 1.0 ~~ 1^..5';
nok 1.0e0 ~~ 1^..5, 'not 1.0e0 ~~ 1^..5';
ok 5 ~~ 1^..5, '5 ~~ 1^..5';
ok 5.0 ~~ 1^..5, '5.0 ~~ 1^..5';
ok 5.0e0 ~~ 1^..5, '5.0e0 ~~ 1^..5';
nok 0 ~~ 1^..5, 'not 0 ~~ 1^..5';
nok 0.999 ~~ 1^..5, 'not 0.999 ~~ 1^..5';
nok 0.999e0 ~~ 1^..5, 'not 0.999e0 ~~ 1^..5';
nok 6 ~~ 1^..5, 'not 6 ~~ 1^..5';
nok 5.001 ~~ 1^..5, 'not 5.001 ~~ 1^..5';
nok 5.001e0 ~~ 1^..5, 'not 5.001e0 ~~ 1^..5';
ok 3 ~~ 1..^5, '3 ~~ 1..^5';
ok 2.5 ~~ 1..^5, '2.5 ~~ 1..^5';
ok 2.5e0 ~~ 1..^5, '2.5e0 ~~ 1..^5';
ok 1 ~~ 1..^5, '1 ~~ 1..^5';
ok 1.0 ~~ 1..^5, '1.0 ~~ 1..^5';
ok 1.0e0 ~~ 1..^5, '1.0e0 ~~ 1..^5';
nok 5 ~~ 1..^5, 'not 5 ~~ 1..^5';
nok 5.0 ~~ 1..^5, 'not 5.0 ~~ 1..^5';
nok 5.0e0 ~~ 1..^5, 'not 5.0e0 ~~ 1..^5';
nok 0 ~~ 1..^5, 'not 0 ~~ 1..^5';
nok 0.999 ~~ 1..^5, 'not 0.999 ~~ 1..^5';
nok 0.999e0 ~~ 1..^5, 'not 0.999e0 ~~ 1..^5';
nok 6 ~~ 1..^5, 'not 6 ~~ 1..^5';
nok 5.001 ~~ 1..^5, 'not 5.001 ~~ 1..^5';
nok 5.001e0 ~~ 1..^5, 'not 5.001e0 ~~ 1..^5';
ok 3 ~~ 1^..^5, '3 ~~ 1^..^5';
ok 2.5 ~~ 1^..^5, '2.5 ~~ 1^..^5';
ok 2.5e0 ~~ 1^..^5, '2.5e0 ~~ 1^..^5';
nok 1 ~~ 1^..^5, 'not 1 ~~ 1^..^5';
nok 1.0 ~~ 1^..^5, 'not 1.0 ~~ 1^..^5';
nok 1.0e0 ~~ 1^..^5, 'not 1.0e0 ~~ 1^..^5';
nok 5 ~~ 1^..^5, 'not 5 ~~ 1^..^5';
nok 5.0 ~~ 1^..^5, 'not 5.0 ~~ 1^..^5';
nok 5.0e0 ~~ 1^..^5, 'not 5.0e0 ~~ 1^..^5';
nok 0 ~~ 1^..^5, 'not 0 ~~ 1^..^5';
nok 0.999 ~~ 1^..^5, 'not 0.999 ~~ 1^..^5';
nok 0.999e0 ~~ 1^..^5, 'not 0.999e0 ~~ 1^..^5';
nok 6 ~~ 1^..^5, 'not 6 ~~ 1^..^5';
nok 5.001 ~~ 1^..^5, 'not 5.001 ~~ 1^..^5';
nok 5.001e0 ~~ 1^..^5, 'not 5.001e0 ~~ 1^..^5';
# Tests which check to see if Range is properly doing numeric
# comparisons for numbers.
ok 6 ~~ 5..21, '6 ~~ 5..21';
ok 21 ~~ 3..50, '21 ~~ 3..50';
nok 3 ~~ 11..50, 'not 3 ~~ 11..50';
nok 21 ~~ 1..5, 'not 21 ~~ 1..5';
ok 'c' ~~ 'b'..'g', "'c' ~~ 'b'..'g'";
ok 'b' ~~ 'b'..'g', "'b' ~~ 'b'..'g'";
ok 'g' ~~ 'b'..'g', "'g' ~~ 'b'..'g'";
nok 'a' ~~ 'b'..'g', "not 'a' ~~ 'b'..'g'";
nok 'h' ~~ 'b'..'g', "not 'h' ~~ 'b'..'g'";
nok 0 ~~ 'a'..'g', "not 0 ~~ 'a'..'g'";
ok 'd' ~~ 'c'..*, "'d' ~~ 'c'..*";
nok 'b' ~~ 'c'..*, "not 'b' ~~ 'c'..*";
ok 'b' ~~ *..'c', "'b' ~~ *..'c'";
nok 'd' ~~ *..'c', "not 'd' ~~ *..'c'";
# RT #75526: [BUG] Some non-alphanumeric ranges don't work
{
ok ' ' ~~ ' '..' ', "' ' ~~ ' '..' '";
ok ' ' ~~ ' '..'A', "' ' ~~ ' '..'A'";
}
ok (1 .. *).is-lazy, "1 .. * is lazy";
ok !(1 .. 2).is-lazy, "1 .. 2 is not lazy";
# vim: ft=perl6
| 41.559322 | 79 | 0.387303 |
ede43fcf82f9071564500031660b0039e3dce966 | 27,018 | pm | Perl | RefinedSeriesModel.pm | aria-jpl/tropmap | 3ebe6fc9593b5ba8a0c48e276883be5120c72e5d | [
"Apache-2.0"
] | null | null | null | RefinedSeriesModel.pm | aria-jpl/tropmap | 3ebe6fc9593b5ba8a0c48e276883be5120c72e5d | [
"Apache-2.0"
] | null | null | null | RefinedSeriesModel.pm | aria-jpl/tropmap | 3ebe6fc9593b5ba8a0c48e276883be5120c72e5d | [
"Apache-2.0"
] | 2 | 2018-11-10T01:51:58.000Z | 2019-05-08T17:18:20.000Z | # PROC::RefinedSeriesModel.pm -*- Perl -*-
#
# This is the PROC::RefinedSeriesModel library.
# It contains routines used to retrieve refined series model parameters
# and calculate coordinates from this model.
package PROC::RefinedSeriesModel;
use ConvertDate;
##############################################################
# GetModeledCoords: return hash of refined model xyz and wgs #
# coordinates for specified epoch #
# #
# arguments: site id, filter type, year, day-of-year, #
# coordinate source id, db handle #
##############################################################
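# example call (values are illustrative only; $dbh is assumed to be a
# connected DBI handle, 'flt' selects the filtered series per the
# flt/unf type convention below):
#   my ($coordRef, $modelRef) =
#     PROC::RefinedSeriesModel::GetModeledCoords($siteID, 'flt', 2005, 123,
#                                                $coordSourceID, $dbh, 0);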
sub GetModeledCoords {
use strict;
my ($start) = (times)[0];
my ($siteID,$type,$year,$doy,$coordSourceID,$dbh,$verbose) = @_;
# get site id, type (flt/unf), ref year, ref day
# will return hash or array of neu or xyz
my (%coordInfo,%modelInfo);
# convert year, day to decyr
$doy = SOPAC::Utils::formatDay($doy);
my ($refEpoch) = ConvertDate::ydoy2decyr($year,$doy);
################################
# connect to db for below code #
################################
############
# neu loop #
############
my ($refX,$refY,$refZ,%postfitRms);
my (@comps) = ("n", "e", "u");
for my $comp (@comps) {
my ($localStart) = (times)[0];
###########################################
# get site vel id for this site/comp/type #
###########################################
my ($siteVelID) = &PROC::GeneralDB::getSiteVelID($siteID,$comp,$type,$coordSourceID,$dbh);
if ($verbose){print STDERR "aa1 $siteVelID $siteID $comp $type $coordSourceID\n"};
# no site vel id for site/type: exit
if (!($siteVelID)) {
return(undef);
}
my ($yInt,$yIntSig,$sinAnn,$cosAnn,$sinSemiAnn,$cosSemiAnn,
$startEpoch,$endEpoch,$postfitRms);
my ($dbYInt,$dbYIntSig,$dbSinAnn,$dbCosAnn,$dbSinSemiAnn,$dbCosSemiAnn,
$dbSinAnnSig,$dbCosAnnSig,$dbSinSemiAnnSig,$dbCosSemiAnnSig,
$dbStartEpoch,$dbEndEpoch,$dbRefX,$dbRefY,$dbRefZ,$dbPostfitRms);
my ($dbMsSlope,$dbMsSlopeSig,$dbMsStartEpoch,$dbMsEndEpoch,
$dbMsdDiff,$dbMsdDiffSig,$dbMsdStartEpoch,
$dbMoOffset,$dbMoOffsetSig,$dbMoCoseismic,$dbMoStartEpoch,
$dbMdDecay,$dbMdDecaySig,$dbMdTau,$dbMdStartEpoch);
####################################################
# get single value per site/comp/type model values #
####################################################
my $tables = "site_velocities sv, modeled_slopes ms, ";
$tables .= "modeled_slope_diffs msd, modeled_decays md, ";
$tables .= "modeled_offsets mo";
my $fields = "sv.y_int,sv.y_int_sig,";
$fields .= "sv.sin_ann,sv.cos_ann,sv.sin_semi_ann,sv.cos_semi_ann,";
$fields .= "sv.sin_ann_sig,sv.cos_ann_sig,";
$fields .= "sv.sin_semi_ann_sig,sv.cos_semi_ann_sig,";
$fields .= "sv.ref_x,sv.ref_y,sv.ref_z,sv.postfit_rms,";
$fields .= "to_char(sv.start_ref_epoch,\'YYYYDDD\'),";
$fields .= "to_char(sv.end_ref_epoch,\'YYYYDDD\'),";
$fields .= "ms.magnitude,ms.magnitude_sig,";
$fields .= "to_char(ms.start_ref_epoch,\'YYYYDDD\'),";
$fields .= "to_char(ms.end_ref_epoch,\'YYYYDDD\'),";
$fields .= "msd.magnitude,msd.magnitude_sig,";
$fields .= "to_char(msd.ref_epoch,\'YYYYDDD\'),";
$fields .= "mo.magnitude,mo.magnitude_sig,mo.coseismic,";
$fields .= "to_char(mo.ref_epoch,\'YYYYDDD\'),";
$fields .= "md.magnitude,md.magnitude_sig,md.tau,";
$fields .= "to_char(md.ref_epoch,\'YYYYDDD\')";
my $restrictions = "sv.site_vel_id = ? and ";
$restrictions .= "sv.site_vel_id = ms.site_vel_id (+) and ";
$restrictions .= "sv.site_vel_id = msd.site_vel_id (+) and ";
$restrictions .= "sv.site_vel_id = mo.site_vel_id (+) and ";
$restrictions .= "sv.site_vel_id = md.site_vel_id (+)";
my $sql = "select $fields from $tables where $restrictions";
my $sth = $dbh->prepare($sql);
$sth->execute($siteVelID);
$sth->bind_columns(undef, \$dbYInt,\$dbYIntSig,\$dbSinAnn,\$dbCosAnn,
\$dbSinSemiAnn,\$dbCosSemiAnn,
\$dbSinAnnSig,\$dbCosAnnSig,
\$dbSinSemiAnnSig,\$dbCosSemiAnnSig,
\$dbRefX,\$dbRefY,\$dbRefZ,\$dbPostfitRms,
\$dbStartEpoch,\$dbEndEpoch,
\$dbMsSlope,\$dbMsSlopeSig,\$dbMsStartEpoch,
\$dbMsEndEpoch,
\$dbMsdDiff,\$dbMsdDiffSig,
\$dbMsdStartEpoch,
\$dbMoOffset,\$dbMoOffsetSig,\$dbMoCoseismic,
\$dbMoStartEpoch,
\$dbMdDecay,\$dbMdDecaySig,\$dbMdTau,
\$dbMdStartEpoch);
if ($verbose){print STDERR "rsm: $sql $siteVelID\n"};
while($sth->fetch) {
$modelInfo{$comp}{various} = {
y_int => $dbYInt,
y_int_sig => $dbYIntSig,
sin_ann => $dbSinAnn,
cos_ann => $dbCosAnn,
sin_semi_ann => $dbSinSemiAnn,
cos_semi_ann => $dbCosSemiAnn,
sin_ann_sig => $dbSinAnnSig,
cos_ann_sig => $dbCosAnnSig,
sin_semi_ann_sig => $dbSinSemiAnnSig,
cos_semi_ann_sig => $dbCosSemiAnnSig,
ref_x => $dbRefX,
ref_y => $dbRefY,
ref_z => $dbRefZ,
start_epoch => $dbStartEpoch,
end_epoch => $dbEndEpoch,
postfit_rms => $dbPostfitRms,
};
# calculate annual, semi-annual terms. these are not used here
# but may be needed by external apps
$modelInfo{$comp}{various}{annual} = sqrt($dbSinAnn**2 + $dbCosAnn**2);
$modelInfo{$comp}{various}{ann_sig} = sqrt($dbSinAnnSig**2 + $dbCosAnnSig**2);
$modelInfo{$comp}{various}{ann_phase} = -(atan2(-$dbSinAnn,$dbCosAnn));
if ($modelInfo{$comp}{various}{ann_phase} < 0.) {
my ($pi) = atan2(1,1)*4;
$modelInfo{$comp}{various}{ann_phase}=$modelInfo{$comp}{various}{ann_phase}+2*$pi;
}
$modelInfo{$comp}{various}{semi_ann} = sqrt($dbSinSemiAnn**2 + $dbCosSemiAnn**2);
$modelInfo{$comp}{various}{semi_ann_sig} = sqrt($dbSinSemiAnnSig**2 + $dbCosSemiAnnSig**2);
$modelInfo{$comp}{various}{semi_ann_phase} = -(atan2(-$dbSinSemiAnn,$dbCosSemiAnn));
if ($modelInfo{$comp}{various}{semi_ann_phase} < 0.) {
my ($pi) = atan2(1,1)*4;
$modelInfo{$comp}{various}{semi_ann_phase}=$modelInfo{$comp}{various}{semi_ann_phase}+2*$pi;
}
# same for all components. use later
$refX = $dbRefX;
$refY = $dbRefY;
$refZ = $dbRefZ;
$postfitRms{$comp}=$dbPostfitRms;
if ($verbose){print STDERR "cc $comp $dbMsStartEpoch $dbMsSlope $dbMsEndEpoch\n"};
$modelInfo{$comp}{slopes}{$dbMsStartEpoch} = {
slope => $dbMsSlope,
end_epoch => $dbMsEndEpoch,
slope_sig => $dbMsSlopeSig,
};
$modelInfo{$comp}{slopeDiffs}{$dbMsdStartEpoch} = {
diff => $dbMsdDiff,
diff_sig => $dbMsdDiffSig,
};
$modelInfo{$comp}{offsets}{$dbMoStartEpoch} = {
offset => $dbMoOffset,
offset_sig => $dbMoOffsetSig,
coseismic => $dbMoCoseismic,
};
$modelInfo{$comp}{decays}{$dbMdStartEpoch} = {
decay => $dbMdDecay,
decay_sig => $dbMdDecaySig,
tau => $dbMdTau,
};
if ($verbose){
print STDERR "$dbYInt $dbSinAnn $dbCosAnn $dbStartEpoch $dbPostfitRms\n";
print STDERR "$comp $dbMdStartEpoch $dbMdDecay $dbMdDecaySig $dbMdTau\n";
}
}
my ($localEnd) = (times)[0];
if ($verbose){printf STDERR "db: %.5f \n",$localEnd-$localStart};
# end component loop
}
################################
# done retrieving model params #
# calculate modeled coords #
################################
# TO DO: if variable indicates no slopes retrieved, set coordInfo hash
# parameter "source" to "mean coordinate" and skip code that attempts
# to calculate algorithm using slopes, etc. if slopes were retrieved,
# set source parameter to "modeled velocity"
# HERE SET POSTFIT_RMS variables
for my $comp (@comps) {
my $href1 = \%{$modelInfo{$comp}{various}};
my %various = %$href1;
if ($verbose){print STDERR "various: $siteID b $various{y_int} $various{sin_ann} $various{cos_ann} $various{sin_semi_ann} $various{cos_semi_ann} $various{start_epoch} $various{end_epoch}\n"};
my ($yInt) = $various{y_int};
my ($yIntSig) = $various{y_int_sig};
my ($sinAnn) = $various{sin_ann};
my ($cosAnn) = $various{cos_ann};
my ($sinSemiAnn) = $various{sin_semi_ann};
my ($cosSemiAnn) = $various{cos_semi_ann};
# need to deref hash to get at it
$href1 = \%{$modelInfo{$comp}{slopes}};
my %slopes = %$href1;
my $slope;
for my $startEpoch(sort keys %slopes) {
if ($startEpoch) {
if ($verbose){
for my $param (sort keys %{$slopes{$startEpoch}}) {
print "slopes: $param $startEpoch $slopes{$startEpoch}{$param} ";
}
print "\n";
}
# rosanne's model only requires first slope
# these are already sorted by date from first to last
# get first slope and bail
$slope = $slopes{$startEpoch}{slope};
last;
}
}
my ($pi) = atan2(1,1)*4;
###########################################
# now have parameters needed to calculate #
# basic model coords at specific date #
###########################################
my ($refCoord);
$refCoord =
$yInt + ($slope * $refEpoch) +
($sinAnn * sin($refEpoch * 2 * $pi)) +
($cosAnn * cos($refEpoch * 2 * $pi));
$refCoord =
$refCoord + ($sinSemiAnn * sin($refEpoch * 4 * $pi)) +
($cosSemiAnn * cos($refEpoch * 4 * $pi));
################################################################
if ($verbose){print "refCoord after slopes $refCoord\n"};
# complex model parameters:
# slope diffs (vbreaks)
my $href2 = \%{$modelInfo{$comp}{slopeDiffs}};
my %slopeDiffs = %$href2;
for my $startEpoch(sort keys %slopeDiffs) {
if ($startEpoch) {
my ($dslope) = $slopeDiffs{$startEpoch}{diff};
my ($year) = substr($startEpoch,0,4);
my ($doy) = substr($startEpoch,4,3);
my ($vbreakEpoch) = ConvertDate::ydoy2decyr($year,$doy);
if ($verbose){
for my $param (sort keys %{$slopeDiffs{$startEpoch}}) {
print STDERR "slopeDiffs: $param $startEpoch $slopeDiffs{$startEpoch}{$param} ";
}
print STDERR "\n";
}
# is ref epoch greater than velocity break date?
# include following code in calculation of coords
if ($refEpoch >= $vbreakEpoch) {
$refCoord =
$refCoord + $dslope * ($refEpoch - $vbreakEpoch);
}
}
}
if ($verbose){print "refCoord after slopeDiffs: $refCoord\n"};
# offsets
my ($href3) = \%{$modelInfo{$comp}{offsets}};
my %offsets = %$href3;
for my $startEpoch(sort keys %offsets) {
if ($startEpoch) {
my ($offset) = $offsets{$startEpoch}{offset};
my ($year) = substr($startEpoch,0,4);
my ($doy) = substr($startEpoch,4,3);
my ($offsetEpoch) = ConvertDate::ydoy2decyr($year,$doy);
if ($refEpoch >= $offsetEpoch) {
$refCoord =
$refCoord + $offset;
}
if ($verbose){
for my $param (sort keys %{$offsets{$startEpoch}}) {
print STDERR "offsets: $param $startEpoch $offsets{$startEpoch}{$param} ";
}
print STDERR "\n";
}
}
}
if ($verbose){print "refCoord after offsets: $refCoord\n"};
# decays
my ($href4) = \%{$modelInfo{$comp}{decays}};
my %decays = %$href4;
for my $startEpoch(sort keys %decays) {
if ($startEpoch) {
my ($decay) = $decays{$startEpoch}{decay};
my ($tau) = $decays{$startEpoch}{tau};
my ($year) = substr($startEpoch,0,4);
my ($doy) = substr($startEpoch,4,3);
my ($tauEpoch) = ConvertDate::ydoy2decyr($year,$doy);
if ($refEpoch >= $tauEpoch) {
$refCoord =
$refCoord +
($decay *
exp( -($refEpoch - $tauEpoch) / ($tau / 365)) -
$decay);
}
if ($verbose){
	for my $param (sort keys %{$decays{$startEpoch}}) {
	  print STDERR "decays: $param $startEpoch $decays{$startEpoch}{$param} ";
}
print STDERR "\n";
}
}
}
if ($verbose){print "refCoord after decays: $refCoord\n"};
$coordInfo{$comp} = $refCoord;
my ($compSigma) = $comp . "_sig";
$coordInfo{$compSigma} = $various{postfit_rms};
# END COMPONENT LOOP
}
###########################################
# get references to jacobian matrix array #
# and jacobian matrix transposed array #
###########################################
# refx,y,z, for now, are the same for each component in the db. use in
# jacobian matrix calculation
# note: create table in db to hold xyz ref values
# currently three sets of these values per site (one per neu comp)
# @jacTr: transposed matrix
my ($refJac,$refJacTransposed) = &getJacobian($refX,$refY,$refZ);
my (@jac) = @$refJac;
my (@jacTr)= @$refJacTransposed;
###########################
# calculate reference xyz #
###########################
my ($x,$y,$z);
$x = ($jacTr[1][1] * $coordInfo{n} +
$jacTr[1][2] * $coordInfo{e} +
$jacTr[1][3] * $coordInfo{u}) / 1000. +
$refX;
$y = ($jacTr[2][1] * $coordInfo{n} +
$jacTr[2][2] * $coordInfo{e} +
$jacTr[2][3] * $coordInfo{u}) / 1000. +
$refY;
$z = ($jacTr[3][1] * $coordInfo{n} +
$jacTr[3][2] * $coordInfo{e} +
$jacTr[3][3] * $coordInfo{u}) / 1000. +
$refZ;
$coordInfo{x} = sprintf ("%13.4f",$x);
$coordInfo{y} = sprintf ("%13.4f",$y);
$coordInfo{z} = sprintf ("%13.4f",$z);
########################
# calculate xyz sigmas #
########################
my @cov_xyz_epoch;
if ($postfitRms{n}) {
$cov_xyz_epoch[1][1] =
$postfitRms{n}**2 * $jacTr[1][1]**2 +
$postfitRms{e}**2 * $jacTr[1][2]**2 +
$postfitRms{u}**2 * $jacTr[1][3]**2;
$cov_xyz_epoch[2][2] =
$postfitRms{n}**2 * $jacTr[2][1]**2 +
$postfitRms{e}**2 * $jacTr[2][2]**2 +
$postfitRms{u}**2 * $jacTr[2][3]**2;
$cov_xyz_epoch[3][3] =
$postfitRms{n}**2 * $jacTr[3][1]**2 +
$postfitRms{e}**2 * $jacTr[3][2]**2 +
$postfitRms{u}**2 * $jacTr[3][3]**2;
my ($xSig,$ySig,$zSig);
$xSig = sqrt($cov_xyz_epoch[1][1])/1000. ;
$ySig = sqrt($cov_xyz_epoch[2][2])/1000. ;
$zSig = sqrt($cov_xyz_epoch[3][3])/1000. ;
$coordInfo{x_sig} = sprintf("%6.4f",$xSig);
$coordInfo{y_sig} = sprintf("%6.4f",$ySig);
$coordInfo{z_sig} = sprintf("%6.4f",$zSig);
}
return(\%coordInfo,\%modelInfo);
}
##############################################################
# GetCoordsFromModelTerms: return xyz, sigmas for epoch #
# given, using a given hash reference of model terms #
# provided to this routine
# run model terms through algorithm
#
# arguments: year, day-of-year, hash ref of model terms,
#            reference x/y/z coordinates, verbose flag
##############################################################
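# example call (values are illustrative only; $modelRef is the model-term
# hash ref returned by GetModeledCoords, $refX/$refY/$refZ are the
# reference cartesian coordinates):
#   my ($x, $y, $z, $xSig, $ySig, $zSig) =
#     PROC::RefinedSeriesModel::GetCoordsFromModelTerms(2005, 123, $modelRef,
#                                                       $refX, $refY, $refZ, 0);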
sub GetCoordsFromModelTerms {
use strict;
my ($year,$doy,$href,$refX,$refY,$refZ,$verbose) = @_;
# dereference hash ref of model terms
my (%modelInfo) = %$href;
# convert year, day to decyr
#$doy = SOPAC::Utils::formatDay($doy);
my ($refEpoch) = &ConvertDate::ydoy2decyr($year,$doy);
################################
# connect to db for below code #
################################
############
# neu loop #
############
my (%postfitRms,%neuCoordInfo,$xSig,$ySig,$zSig);
my (@comps) = ("n", "e", "u");
for my $comp (@comps) {
my $href1 = \%{$modelInfo{$comp}{various}};
my %various = %$href1;
my ($yInt) = $various{y_int};
if (! defined $yInt) {
# No motion model - use reference as apriori (short time series)
return $refX, $refY, $refZ;
}
my ($yIntSig) = $various{y_int_sig};
my ($sinAnn) = $various{sin_ann};
my ($cosAnn) = $various{cos_ann};
my ($sinSemiAnn) = $various{sin_semi_ann};
my ($cosSemiAnn) = $various{cos_semi_ann};
$postfitRms{$comp} = $various{postfit_rms};
# need to deref hash to get at it
$href1 = \%{$modelInfo{$comp}{slopes}};
my %slopes = %$href1;
my $slope;
for my $startEpoch(sort keys %slopes) {
if ($startEpoch) {
if ($verbose){
for my $param (sort keys %{$slopes{$startEpoch}}) {
print "slopes: $param $startEpoch $slopes{$startEpoch}{$param} ";
}
print "\n";
}
# rosanne's model only requires first slope
# these are already sorted by date from first to last
# get first slope and bail
$slope = $slopes{$startEpoch}{slope};
last;
}
}
my ($pi) = atan2(1,1)*4;
###########################################
# now have parameters needed to calculate #
# basic model coords at specific date #
###########################################
my $refCoord = $yInt;
if (defined $slope) {
$refCoord += $slope * $refEpoch;
}
if (defined $sinAnn) {
$refCoord += $sinAnn * sin($refEpoch * 2 * $pi);
}
if (defined $cosAnn) {
$refCoord += $cosAnn * cos($refEpoch * 2 * $pi);
}
if (defined $sinSemiAnn) {
$refCoord += $sinSemiAnn * sin($refEpoch * 4 * $pi);
}
if (defined $cosSemiAnn) {
$refCoord += $cosSemiAnn * cos($refEpoch * 4 * $pi);
}
################################################################
if ($verbose){print "refCoord after slopes: $refCoord\n"};
# complex model parameters:
# slope diffs (vbreaks)
my $href2 = \%{$modelInfo{$comp}{slopeDiffs}};
my %slopeDiffs = %$href2;
for my $startEpoch(sort keys %slopeDiffs) {
if ($startEpoch) {
my ($dslope) = $slopeDiffs{$startEpoch}{diff};
my ($year) = substr($startEpoch,0,4);
my ($doy) = substr($startEpoch,4,3);
my ($vbreakEpoch) = ConvertDate::ydoy2decyr($year,$doy);
# is ref epoch greater than velocity break date? include following
# code in calculation of coords
if ($refEpoch >= $vbreakEpoch) {
$refCoord =
$refCoord + $dslope * ($refEpoch - $vbreakEpoch);
}
if ($verbose){
for my $param (sort keys %{$slopeDiffs{$startEpoch}}) {
print "decays: $param $startEpoch $slopeDiffs{$startEpoch}{$param} ";
}
print "\n";
}
}
}
if ($verbose){print "refCoord after slopeDiffs: $refCoord\n"};
# offsets
my ($href3) = \%{$modelInfo{$comp}{offsets}};
my %offsets = %$href3;
for my $startEpoch(sort keys %offsets) {
if ($startEpoch) {
my ($offset) = $offsets{$startEpoch}{offset};
my ($year) = substr($startEpoch,0,4);
my ($doy) = substr($startEpoch,4,3);
my ($offsetEpoch) = ConvertDate::ydoy2decyr($year,$doy);
if ($refEpoch >= $offsetEpoch) {
$refCoord =
$refCoord + $offset;
}
if ($verbose){
for my $param (sort keys %{$offsets{$startEpoch}}) {
print "decays: $param $startEpoch $offsets{$startEpoch}{$param} ";
}
print "\n";
}
}
}
if ($verbose){print "refCoord after offsets: $refCoord\n"};
# decays
my ($href4) = \%{$modelInfo{$comp}{decays}};
my %decays = %$href4;
for my $startEpoch(sort keys %decays) {
if ($startEpoch) {
my ($decay) = $decays{$startEpoch}{decay};
my ($tau) = $decays{$startEpoch}{tau};
my ($year) = substr($startEpoch,0,4);
my ($doy) = substr($startEpoch,4,3);
my ($tauEpoch) = ConvertDate::ydoy2decyr($year,$doy);
if ($refEpoch >= $tauEpoch) {
$refCoord =
$refCoord +
($decay *
exp( -($refEpoch - $tauEpoch) / ($tau / 365)) -
$decay);
}
if ($verbose){
	  for my $param (sort keys %{$decays{$startEpoch}}) {
	    print "decays: $param $startEpoch $decays{$startEpoch}{$param} ";
}
print "\n";
}
}
}
if ($verbose){print "refCoord after decays: $refCoord\n"};
$neuCoordInfo{$comp} = $refCoord;
my ($compSigma) = $comp . "_sig";
$neuCoordInfo{$compSigma} = $various{postfit_rms};
# END COMPONENT LOOP
}
#############################################
# we now have our neu values for this epoch #
# convert to xyz
#############################################
###########################################
# get references to jacobian matrix array #
# and jacobian matrix transposed array #
###########################################
# refx,y,z, for now, are the same for each component in the db. use in
# jacobian matrix calculation
# note: create table in db to hold xyz ref values
# currently three sets of these values per site (one per neu comp)
# @jacTr: transposed matrix
my ($refJac,$refJacTransposed) = &getJacobian($refX,$refY,$refZ);
my (@jac) = @$refJac;
my (@jacTr)= @$refJacTransposed;
###########################
# calculate reference xyz #
###########################
my ($x,$y,$z);
$x = ($jacTr[1][1] * $neuCoordInfo{n} +
$jacTr[1][2] * $neuCoordInfo{e} +
$jacTr[1][3] * $neuCoordInfo{u}) / 1000. +
$refX;
$y = ($jacTr[2][1] * $neuCoordInfo{n} +
$jacTr[2][2] * $neuCoordInfo{e} +
$jacTr[2][3] * $neuCoordInfo{u}) / 1000. +
$refY;
$z = ($jacTr[3][1] * $neuCoordInfo{n} +
$jacTr[3][2] * $neuCoordInfo{e} +
$jacTr[3][3] * $neuCoordInfo{u}) / 1000. +
$refZ;
########################
# calculate xyz sigmas #
########################
my @cov_xyz_epoch;
if ($postfitRms{n}) {
$cov_xyz_epoch[1][1] =
$postfitRms{n}**2 * $jacTr[1][1]**2 +
$postfitRms{e}**2 * $jacTr[1][2]**2 +
$postfitRms{u}**2 * $jacTr[1][3]**2;
$cov_xyz_epoch[2][2] =
$postfitRms{n}**2 * $jacTr[2][1]**2 +
$postfitRms{e}**2 * $jacTr[2][2]**2 +
$postfitRms{u}**2 * $jacTr[2][3]**2;
$cov_xyz_epoch[3][3] =
$postfitRms{n}**2 * $jacTr[3][1]**2 +
$postfitRms{e}**2 * $jacTr[3][2]**2 +
$postfitRms{u}**2 * $jacTr[3][3]**2;
$xSig = sqrt($cov_xyz_epoch[1][1])/1000. ;
$ySig = sqrt($cov_xyz_epoch[2][2])/1000. ;
$zSig = sqrt($cov_xyz_epoch[3][3])/1000. ;
}
return($x,$y,$z,$xSig,$ySig,$zSig);
###############################
# end GetCoordsFromModelTerms
###############################
}
################################################################
# GetModeledVels: return list of refined model xyz and         #
# lat/lon/ht velocities for specified epoch                    #
#                                                              #
# arguments: site id, ref to model info, ref epoch, db handle  #
################################################################
sub GetModeledVels {
use strict;
my ($siteID,$href,$refEpoch,$dbh) = @_;
my (%modelInfo) = %$href;
my %slopeInfo;
for my $comp (sort keys %modelInfo) {
my $href = \%{$modelInfo{$comp}{slopes}};
my %slopes = %$href;
for my $startEpoch(sort keys %slopes) {
# we need to assign the first slope in case user wants
# slope prior to site coming online
$slopeInfo{$comp} = $slopes{$startEpoch}{slope};
my $slopeSig = $comp . "_sig";
$slopeInfo{$slopeSig} = $slopes{$startEpoch}{slope_sig};
# overwrite velocity so we use last value, unless
# start epoch is greater than year/doy provided
# to this program
my ($year) = substr($startEpoch,0,4);
my ($doy) = substr($startEpoch,4,3);
my ($decStartEpoch) = ConvertDate::ydoy2decyr($year,$doy);
if ($startEpoch && $refEpoch > $decStartEpoch) {
$slopeInfo{$comp} = $slopes{$startEpoch}{slope};
$slopeInfo{$slopeSig} = $slopes{$startEpoch}{slope_sig};
}
}
}
# do we have all neu slope values? if not, skip calc of xyz vels
  my ($xVel,$yVel,$zVel) = (0.0, 0.0, 0.0);
  my ($latVel,$lonVel,$htVel) = (0.0, 0.0, 0.0);
if ($slopeInfo{n} && $slopeInfo{e} && $slopeInfo{u}) {
# get lat/lon
# use operational weekly globk as source
my ($refToGeodArrayRef) =
&SOPAC::SiteCoordinates::Geodetic::get($dbh,
{
-site_id => $siteID,
-precision => "8",
-tokens =>
["lat", "lon", "source_id"]
}
);
if (defined($refToGeodArrayRef)) {
# we provided source id, so only get single array returned
while (@$refToGeodArrayRef) {
my ($geodArrayRef) = shift @$refToGeodArrayRef;
my $lat = $$geodArrayRef[0];
my $lon = $$geodArrayRef[1];
if ($lat) {
# vel is same for wgs or nad
$latVel = $slopeInfo{n}/1000.;
$lonVel = $slopeInfo{e}/1000.;
$htVel = $slopeInfo{u}/1000.;
# get xyz vel
($xVel,$yVel,$zVel) = &PROC::TransformCoords::vneu2vxyz($lat,$lon,$latVel,$lonVel,$htVel);
last;
}
}
}
}
return ($xVel,$yVel,$zVel,$latVel,$lonVel,$htVel);
}
sub getJacobian {
my ($x,$y,$z) = @_;
# algorithm from Simon's xyztogeo.c and Rosanne's xyzJacobian.m
# use jac to rotate vectors as
#
# n x x n
# e = jac * y or y = jac' * e
# u z z u
#
# or
#
# neu = xyz * jac' or xyz = neu * jac
my (@jac,@jacTransposed);
my ($earth_rad) = 6378137.0;
my ($f) = 1.0 / 298.257222101;
my ($pi) = atan2(1,1)*4;
my ($deg2rad) = $pi / 180.0;
my ($twopi) = $pi * 2.0;
my ($tolerance) = 0.0001;
my ($eccsq) = 2.0 * $f - $f * $f;
my ($eq_radius) = sqrt($x*$x+$y*$y);
my ($lat_p) = atan2($z, $eq_radius);
my ($lon_i) = atan2($y,$x);
if ($lon_i < 0.0) {
$lon_i = $lon_i + $twopi;
}
my ($h_p) = 0.0;
my ($niter) = 0;
my ($converged) = 0;
my ($rad_curve,$rad_lat,$lat_i,$h_i);
while ($converged == 0) {
$rad_curve = $earth_rad / sqrt(1.0 - $eccsq * sin($lat_p) * sin($lat_p));
$rad_lat = $eq_radius * ( 1.0 - $eccsq * $rad_curve / ($rad_curve + $h_p) );
$lat_i = atan2($z, $rad_lat);
#$h_i;
if (abs($lat_i) < ($pi / 4.0)) {
$h_i = $eq_radius / cos($lat_i) - $rad_curve;
}
else {
$h_i = $z / sin($lat_i) - (1.0 - $eccsq) * $rad_curve;
}
if (abs($h_i - $h_p) < $tolerance &&
(abs($lat_i - $lat_p) * $rad_curve) < $tolerance) {
$converged = 1;
}
$niter=$niter+1;
if ($niter > 50) {
print STDERR "xyztogeo error : failure to converge";
$converged = 1;
exit();
}
$h_p = $h_i;
$lat_p = $lat_i;
}
#jac = [-sin(lat_i)*cos(lon_i) -sin(lat_i)*sin(lon_i) cos(lat_i);
# -sin(lon_i) cos(lon_i) 0.0;
# cos(lat_i)*cos(lon_i) cos(lat_i)*sin(lon_i) sin(lat_i)];
$jac[1][1] = -sin($lat_i) * cos($lon_i);
$jac[1][2] = -sin($lat_i) * sin($lon_i);
$jac[1][3] = cos($lat_i);
$jac[2][1] = -sin($lon_i);
$jac[2][2] = cos($lon_i);
$jac[2][3] = 0.0;
$jac[3][1] = cos($lat_i) * cos($lon_i);
$jac[3][2] = cos($lat_i) * sin($lon_i);
$jac[3][3] = sin($lat_i);
$jacTransposed[1][1] = -sin($lat_i) * cos($lon_i);
$jacTransposed[1][2] = -sin($lon_i);
$jacTransposed[1][3] = cos($lat_i) * cos($lon_i);
$jacTransposed[2][1] = -sin($lat_i) * sin($lon_i);
$jacTransposed[2][2] = cos($lon_i);
$jacTransposed[2][3] = cos($lat_i) * sin($lon_i);
$jacTransposed[3][1] = cos($lat_i);
$jacTransposed[3][2] = 0.0;
$jacTransposed[3][3] = sin($lat_i);
return(\@jac,\@jacTransposed);
}
1;
| 28.712009 | 193 | 0.543267 |
ed5c2568bebe3652fb7471abc36da9f902c67169 | 6,399 | pm | Perl | lib/Cfn/Resource/AWS/SSM/ResourceDataSync.pm | torrentalle/cfn-perl | f5fd3b9e16bbdf6bf3d7e7d6850dfe0f16888aaf | [
"Apache-2.0"
] | null | null | null | lib/Cfn/Resource/AWS/SSM/ResourceDataSync.pm | torrentalle/cfn-perl | f5fd3b9e16bbdf6bf3d7e7d6850dfe0f16888aaf | [
"Apache-2.0"
] | null | null | null | lib/Cfn/Resource/AWS/SSM/ResourceDataSync.pm | torrentalle/cfn-perl | f5fd3b9e16bbdf6bf3d7e7d6850dfe0f16888aaf | [
"Apache-2.0"
] | null | null | null | # AWS::SSM::ResourceDataSync generated from spec 14.3.0
use Moose::Util::TypeConstraints;
coerce 'Cfn::Resource::Properties::AWS::SSM::ResourceDataSync',
from 'HashRef',
via { Cfn::Resource::Properties::AWS::SSM::ResourceDataSync->new( %$_ ) };
package Cfn::Resource::AWS::SSM::ResourceDataSync {
use Moose;
extends 'Cfn::Resource';
has Properties => (isa => 'Cfn::Resource::Properties::AWS::SSM::ResourceDataSync', is => 'rw', coerce => 1);
sub AttributeList {
[ ]
}
sub supported_regions {
[ 'ap-northeast-1','ap-northeast-2','ap-south-1','ap-southeast-1','ap-southeast-2','ca-central-1','cn-north-1','cn-northwest-1','eu-central-1','eu-west-1','eu-west-2','eu-west-3','sa-east-1','us-east-1','us-east-2','us-gov-west-1','us-west-1','us-west-2' ]
}
}
subtype 'Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::AwsOrganizationsSource',
as 'Cfn::Value';
coerce 'Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::AwsOrganizationsSource',
from 'HashRef',
via {
if (my $f = Cfn::TypeLibrary::try_function($_)) {
return $f
} else {
return Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::AwsOrganizationsSourceValue->new( %$_ );
}
};
package Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::AwsOrganizationsSourceValue {
use Moose;
use MooseX::StrictConstructor;
extends 'Cfn::Value::TypedValue';
has OrganizationalUnits => (isa => 'Cfn::Value::Array|Cfn::Value::Function|Cfn::DynamicValue', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
has OrganizationSourceType => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
}
subtype 'Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::SyncSource',
as 'Cfn::Value';
coerce 'Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::SyncSource',
from 'HashRef',
via {
if (my $f = Cfn::TypeLibrary::try_function($_)) {
return $f
} else {
return Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::SyncSourceValue->new( %$_ );
}
};
package Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::SyncSourceValue {
use Moose;
use MooseX::StrictConstructor;
extends 'Cfn::Value::TypedValue';
has AwsOrganizationsSource => (isa => 'Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::AwsOrganizationsSource', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
has IncludeFutureRegions => (isa => 'Cfn::Value::Boolean', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
has SourceRegions => (isa => 'Cfn::Value::Array|Cfn::Value::Function|Cfn::DynamicValue', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
has SourceType => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
}
subtype 'Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::S3Destination',
as 'Cfn::Value';
coerce 'Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::S3Destination',
from 'HashRef',
via {
if (my $f = Cfn::TypeLibrary::try_function($_)) {
return $f
} else {
return Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::S3DestinationValue->new( %$_ );
}
};
package Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::S3DestinationValue {
use Moose;
use MooseX::StrictConstructor;
extends 'Cfn::Value::TypedValue';
has BucketName => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has BucketPrefix => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has BucketRegion => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has KMSKeyArn => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has SyncFormat => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
}
package Cfn::Resource::Properties::AWS::SSM::ResourceDataSync {
use Moose;
use MooseX::StrictConstructor;
extends 'Cfn::Resource::Properties';
has BucketName => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has BucketPrefix => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has BucketRegion => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has KMSKeyArn => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has S3Destination => (isa => 'Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::S3Destination', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has SyncFormat => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has SyncName => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, required => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
has SyncSource => (isa => 'Cfn::Resource::Properties::AWS::SSM::ResourceDataSync::SyncSource', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Mutable');
has SyncType => (isa => 'Cfn::Value::String', is => 'rw', coerce => 1, traits => [ 'CfnMutability' ], mutability => 'Immutable');
}
1;
### main pod documentation begin ###
=encoding UTF-8
=head1 NAME
Cfn::Resource::AWS::SSM::ResourceDataSync - Cfn resource for AWS::SSM::ResourceDataSync
=head1 DESCRIPTION
This module implements a Perl representation of the CloudFormation object AWS::SSM::ResourceDataSync.
See L<Cfn> for more information on how to use it.
=head1 AUTHOR
Jose Luis Martinez
CAPSiDE
[email protected]
=head1 COPYRIGHT and LICENSE
Copyright (c) 2013 by CAPSiDE
This code is distributed under the Apache 2 License. The full text of the
license can be found in the LICENSE file included with this module.
=cut
| 46.369565 | 260 | 0.652289 |
ed4c435e1c4f11cd3c8a2815a8fbdc61da9e1b30 | 4,135 | al | Perl | benchmark/benchmarks/FASP-benchmarks/data/delaunay-3d/delaunay3d-0151-160-1080.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
] | null | null | null | benchmark/benchmarks/FASP-benchmarks/data/delaunay-3d/delaunay3d-0151-160-1080.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
] | null | null | null | benchmark/benchmarks/FASP-benchmarks/data/delaunay-3d/delaunay3d-0151-160-1080.al | krzysg/FaspHeuristic | 1929c40e3fbc49e68b04acfc5522539a18758031 | [
"MIT"
] | null | null | null | 1 11 14 34 109 149 158 160
2 18 49 56 88 119 121 134 146
3 25 73 133
4 2 27 56 61 88 93 119 135
5 1 6 23 60 158 159 160
6 1 14 28 34 64 124
7 25 79 92 110 132 156
8 12 33 56 70 74
9 41 48 57 61 83 130 138 155
10 13 28 52 68 124 127
11 5 6 149 158 160
12 2 54 119 121 125 134
13 8 14 34 44 56 74 107
14 5 11 16 51 56 64 98 126 159
15 16 28 39 123 139 147
16 22 39 56 64 81 115 140 147
17 43 68 122 123 137 141
18 8 12 50 54 70 119
19 16 52 69 81 115
20 17 35 48 112 131
21 48 59 67 77 96 110 118
22 19 56 81 98 126 147
23 44 59 111 139
24 12 26 101 119 155
25 21 46 100 156
26 40 54 71 73 155
27 49 56 61 66 79 84 88 105 132 135 138
28 52 58 139 147 159
29 84 91 105 113
30 8 10 33 54 74 94 127
31 45 50 54 70 71 99 128 129 152
32 7 14 56 59 60 92 109 126 148
33 93 107 108
34 14 44 85 101 103 109 144 149
35 17 131 140
36 68 78 81 122 127 128
37 17 20 26 35 41 48 65 95 102 141 151
38 23 46 60 67 136 156 160
39 22 86 123 147 159
40 24 82 116 130 151 155
41 96 131 132 145 150
42 10 15 28 52 58 75 81 94 115 127
43 20 35 58 86 87 95 122 123 137 147
44 1 5 6 10 28 111 125 139 144 159 160
45 37 78 129 141
46 7 23 32 62 67 72 126 132 143 158
47 72 112 118 136 137
48 41 96 110 112 118 131 145
49 104 134 153
50 54 71 120 121 142 146
51 13 32 34 85 149
52 13 74 93 108 115 124
53 38 86 98 111 123 137
54 24 71 119 125 127 129
55 20 48 87 137
56 33 49 51 52 64 70 79 85 92 93 104 126 134 146
57 41 73 82 83 130 132 138
58 10 36 69 80 94 102 122 128
59 3 7 25 46 62 67 72 100 110 114 143 148 156
60 11 14 23 39 46 53 67 111 139 143
61 50 56 63 93 120 135 146
62 29 32 51 149
63 9 50 57 71 120 131 132
64 13 19 28 51 52 124
65 45 76 78 131 141 151
66 29 84 90 148 153
67 23 72 77
68 28 30 43 44 58 94 111 122 123 127 139
69 22 35 39 86 98 120 140 147
70 33 50 61 81 93 157
71 24 45 65 76 78 120 129 131 141 142
72 21 23 38 77 136
73 41 48 130 150
74 33 42 75 107 108 124
75 10 30 94 107 157
76 31 37 45 50 80 102 120 131
77 38 47 110 117
78 31 76 127
79 29 32 61 66 148
80 35 36 43 69 81 102 120 128 147
81 31 58 69 115 120 127 128
82 3 26 73 105 113 130 150
83 26 37 40 41 48 63 65 73 82 141 151
84 2 49 85 101 154
85 13 29 49 62 91 97 101 103 104 109 149
86 35 117 123 147
87 17 20 21 47 48 68 72 118 136 145
88 49 56 84 105 153
89 3 7 79 82 132 138 150
90 27 29 79 89 105 133 138 153
91 3 59 62 97 143 149
92 9 35 41 46 61 63 67 79 95 120 126 131
93 16 19 22 64 69 81 140 157
94 10 127 152
95 9 20 35 41 48 112
96 25 59 110 118 132 145
97 27 29 32 49 51 56 62 79 148
98 16 35 38 39 60 86 92 117 123 140
99 8 12 18 30 33 50 54 70 119 121 152
100 3 7 89 91 114 133
101 2 29 49 91 105 144 149 154
102 17 35 43 45 78 120 122 131
103 13 101 104 125
104 2 13 134
105 2 4 24 40 66 84 106 113 116 119 130 138 155
106 18 24 26 40 50 54 135 142
107 8 10 30 42 52 124 125
108 8 13 19 42 56 70 75 93 152
109 14 51 60 62 97 143 149
110 25 41 92 95 118
111 38 47 72 87 123 136
112 53 55 77 87 110 118 136 137
113 3 66 91 100
114 62 91 113 133 148
115 10 15 28 58 64 157 159
116 24 26 82 83 130
117 35 38 43 53 67 92 95 112 136 156
118 55 72 77 145
119 88 101 106 125 155
120 31 35 70 93 128 131 140 142
121 4 8 18 70 106 119 135 146
122 78 80 127 147
123 38 60 87 136 137
124 13 14 28 34 44 115 159
125 2 8 10 24 30 99 101 104 144 154
126 16 38 60 67 98 117
127 58 125 157
128 76 78 102 122 127
129 30 36 78 81 99 127 128 157
130 63 83 138 155
131 9 37 83 95
132 9 25 32 61 79 92 110 138 155 156
133 66 79 82 89 105 113 138
134 4 8 13 103 121 125 144
135 50 105 119 146 155
136 53 77
137 20 86 87 95 117 136
138 61 66 79 82 150
139 5 6 39 111 123 159
140 19 22 56 61 92 126
141 26 54 78 102 122 127 129 151
142 24 26 54 63 76 131 151 155
143 32 62
144 8 10 13 33 74 103 107 108 149
145 3 20 21 25 59 72 73 82 150
146 4 8 12 70 93 134
147 17 35 42 44 58 68 81 102 115 123 139 159
148 7 29 62 89 90 91 100 113 132 133
149 44 60 125 143 158
150 3 7 25 57 96 100 132 133
151 24 26 54 63 71 131 155
152 30 33 70 74 75 127 129 157
153 27 29 56 79 84 85 97 101
154 2 49 85 103 104 119
155 27 50 61 63 83 106 138
156 21 46 67 77 92 95 110 112
157 19 31 42 52 58 81 94 108
158 14 32 60 109 126 143
159 6 15 16 60 64 98 126
160 23 46 59 60 72 91 139 143 149 158 | 25.84375 | 48 | 0.700363 |
edc18b492800f7f5a941cd28924e762f7b876f1b | 1,795 | pm | Perl | perl5/src/local-lib-1.008004/inc/Module/Install/Win32.pm | br/home | bb0d89936b5cb4ba6633d6c1271c3b2c28c67d7f | [
"Apache-2.0"
] | 1 | 2022-03-22T01:40:28.000Z | 2022-03-22T01:40:28.000Z | inc/Module/Install/Win32.pm | gitpan/Finance-MtGox | e4e8d6bce5b9db2b15109c9776b2ba6e0be8c0ce | [
"MIT",
"X11",
"Unlicense"
] | null | null | null | inc/Module/Install/Win32.pm | gitpan/Finance-MtGox | e4e8d6bce5b9db2b15109c9776b2ba6e0be8c0ce | [
"MIT",
"X11",
"Unlicense"
] | 1 | 2022-03-22T01:40:43.000Z | 2022-03-22T01:40:43.000Z | #line 1
package Module::Install::Win32;
use strict;
use Module::Install::Base ();
use vars qw{$VERSION @ISA $ISCORE};
BEGIN {
$VERSION = '1.00';
@ISA = 'Module::Install::Base';
$ISCORE = 1;
}
# determine if the user needs nmake, and download it if needed
sub check_nmake {
my $self = shift;
$self->load('can_run');
$self->load('get_file');
require Config;
return unless (
$^O eq 'MSWin32' and
$Config::Config{make} and
$Config::Config{make} =~ /^nmake\b/i and
! $self->can_run('nmake')
);
print "The required 'nmake' executable not found, fetching it...\n";
require File::Basename;
my $rv = $self->get_file(
url => 'http://download.microsoft.com/download/vc15/Patch/1.52/W95/EN-US/Nmake15.exe',
ftp_url => 'ftp://ftp.microsoft.com/Softlib/MSLFILES/Nmake15.exe',
local_dir => File::Basename::dirname($^X),
size => 51928,
run => 'Nmake15.exe /o > nul',
check_for => 'Nmake.exe',
remove => 1,
);
die <<'END_MESSAGE' unless $rv;
-------------------------------------------------------------------------------
Since you are using Microsoft Windows, you will need the 'nmake' utility
before installation. It's available at:
http://download.microsoft.com/download/vc15/Patch/1.52/W95/EN-US/Nmake15.exe
or
ftp://ftp.microsoft.com/Softlib/MSLFILES/Nmake15.exe
Please download the file manually, save it to a directory in %PATH% (e.g.
C:\WINDOWS\COMMAND\), then launch the MS-DOS command line shell, "cd" to
that directory, and run "Nmake15.exe" from there; that will create the
'nmake.exe' file needed by this module.
You may then resume the installation process described in README.
-------------------------------------------------------------------------------
END_MESSAGE
}
1;
| 27.615385 | 94 | 0.605014 |
73f498a56bf0bb969867465e16ef8ecb5b3ab3ab | 313 | pl | Perl | trivium.pl | hacktor/Cipher-Stream-Trivium | cddb24724c5c3881bd99ae0e4eeafb791b282ecd | [
"Unlicense"
] | null | null | null | trivium.pl | hacktor/Cipher-Stream-Trivium | cddb24724c5c3881bd99ae0e4eeafb791b282ecd | [
"Unlicense"
] | null | null | null | trivium.pl | hacktor/Cipher-Stream-Trivium | cddb24724c5c3881bd99ae0e4eeafb791b282ecd | [
"Unlicense"
] | null | null | null | #!/usr/bin/perl
use strict;
use warnings;
use FindBin;
use lib "$FindBin::Bin/lib";
use Cipher::Stream::Trivium;
use Data::Dumper;
my $T = Cipher::Stream::Trivium->new(key => '0110' x 20, iv => 3141562);
$T->init();
my $stream;
for (1..10000) {
$stream = $T->next(64);
print "Cryptostream: $stream\n";
}
| 16.473684 | 72 | 0.629393 |
edd610ab36311d195ea6563f13d6de4f2fc5922e | 1,236 | pm | Perl | perl/vendor/lib/DateTime/TimeZone/Indian/Cocos.pm | mnikolop/Thesis_project_CyberDoc | 9a37fdd5a31de24cb902ee31ef19eb992faa1665 | [
"Apache-2.0"
] | 4 | 2018-04-20T07:27:13.000Z | 2021-12-21T05:19:24.000Z | perl/vendor/lib/DateTime/TimeZone/Indian/Cocos.pm | mnikolop/Thesis_project_CyberDoc | 9a37fdd5a31de24cb902ee31ef19eb992faa1665 | [
"Apache-2.0"
] | 4 | 2021-03-10T19:10:00.000Z | 2021-05-11T14:58:19.000Z | perl/vendor/lib/DateTime/TimeZone/Indian/Cocos.pm | mnikolop/Thesis_project_CyberDoc | 9a37fdd5a31de24cb902ee31ef19eb992faa1665 | [
"Apache-2.0"
] | 1 | 2019-11-12T02:29:26.000Z | 2019-11-12T02:29:26.000Z | # This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.07) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
#
# Generated from /tmp/rnClxBLdxJ/australasia. Olson data version 2013a
#
# Do not edit this file directly.
#
package DateTime::TimeZone::Indian::Cocos;
{
$DateTime::TimeZone::Indian::Cocos::VERSION = '1.57';
}
use strict;
use Class::Singleton 1.03;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;
@DateTime::TimeZone::Indian::Cocos::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );
my $spans =
[
[
DateTime::TimeZone::NEG_INFINITY, # utc_start
59926671140, # utc_end 1899-12-31 17:32:20 (Sun)
DateTime::TimeZone::NEG_INFINITY, # local_start
59926694400, # local_end 1900-01-01 00:00:00 (Mon)
23260,
0,
'LMT',
],
[
59926671140, # utc_start 1899-12-31 17:32:20 (Sun)
DateTime::TimeZone::INFINITY, # utc_end
59926694540, # local_start 1900-01-01 00:02:20 (Mon)
DateTime::TimeZone::INFINITY, # local_end
23400,
0,
'CCT',
],
];
sub olson_version { '2013a' }
sub has_dst_changes { 0 }
sub _max_year { 2023 }
sub _new_instance
{
return shift->_init( @_, spans => $spans );
}
1;
| 20.6 | 87 | 0.68932 |
edcc33df7b779923207a1986d645cbf53ef8d23f | 1,470 | pm | Perl | core/server/OpenXPKI/Server/Authentication/ClientX509.pm | MichalMMac/openxpki | 5843d53c60b7d19e0562772fdc0beecf98187363 | [
"Apache-2.0"
] | null | null | null | core/server/OpenXPKI/Server/Authentication/ClientX509.pm | MichalMMac/openxpki | 5843d53c60b7d19e0562772fdc0beecf98187363 | [
"Apache-2.0"
] | null | null | null | core/server/OpenXPKI/Server/Authentication/ClientX509.pm | MichalMMac/openxpki | 5843d53c60b7d19e0562772fdc0beecf98187363 | [
"Apache-2.0"
] | null | null | null | package OpenXPKI::Server::Authentication::ClientX509;
use strict;
use warnings;
use English;
use OpenXPKI::Debug;
use OpenXPKI::Exception;
use OpenXPKI::Server::Context qw( CTX );
use DateTime;
use Data::Dumper;
use Moose;
extends 'OpenXPKI::Server::Authentication::X509';
sub login_step {
##! 1: 'start'
my $self = shift;
my $arg_ref = shift;
my $msg = $arg_ref->{MESSAGE};
my $params = $msg->{PARAMS};
if (! $params->{certificate} ) {
##! 4: 'no login data received (yet)'
return (undef, undef, {
SERVICE_MSG => "GET_CLIENT_X509_LOGIN",
PARAMS => {
NAME => $self->label(),
DESCRIPTION => $self->description(),
},
});
}
##! 2: "credentials ... present"
my $trust_anchors = $self->trust_anchors();
##! 32: 'trust anchors ' . Dumper $trust_anchors
my $validate = CTX('api2')->validate_certificate(
pem => $params->{certificate},
chain => $params->{chain} // [],
anchor => $trust_anchors,
);
return $self->_validation_result( $validate );
}
1;
__END__
=head1 Name
OpenXPKI::Server::Authentication::ClientX509 - support for client-based X509 authentication.
=head1 Description
Leaves the SSL negotiation to the client and requires the certificate chain of the
authenticated client to be passed.
See OpenXPKI::Server::Authentication::X509 for configuration and options.
| 21.940299 | 92 | 0.614286 |
ed219b002bf5d066023da0d5294e88cc3d106151 | 42,102 | pm | Perl | Mojoqq/perl/vendor/lib/DBIx/Class/Schema.pm | ghuan/Mojo-webqq-for-windows | ad44014da4578f99aa3efad0b55f0fc3bc3af322 | [
"Unlicense"
] | null | null | null | Mojoqq/perl/vendor/lib/DBIx/Class/Schema.pm | ghuan/Mojo-webqq-for-windows | ad44014da4578f99aa3efad0b55f0fc3bc3af322 | [
"Unlicense"
] | null | null | null | Mojoqq/perl/vendor/lib/DBIx/Class/Schema.pm | ghuan/Mojo-webqq-for-windows | ad44014da4578f99aa3efad0b55f0fc3bc3af322 | [
"Unlicense"
] | null | null | null | package DBIx::Class::Schema;
use strict;
use warnings;
use base 'DBIx::Class';
use DBIx::Class::Carp;
use Try::Tiny;
use Scalar::Util qw/weaken blessed/;
use DBIx::Class::_Util qw(refcount quote_sub);
use Devel::GlobalDestruction;
use namespace::clean;
__PACKAGE__->mk_classdata('class_mappings' => {});
__PACKAGE__->mk_classdata('source_registrations' => {});
__PACKAGE__->mk_classdata('storage_type' => '::DBI');
__PACKAGE__->mk_classdata('storage');
__PACKAGE__->mk_classdata('exception_action');
__PACKAGE__->mk_classdata('stacktrace' => $ENV{DBIC_TRACE} || 0);
__PACKAGE__->mk_classdata('default_resultset_attributes' => {});
=head1 NAME
DBIx::Class::Schema - composable schemas
=head1 SYNOPSIS
package Library::Schema;
use base qw/DBIx::Class::Schema/;
# load all Result classes in Library/Schema/Result/
__PACKAGE__->load_namespaces();
package Library::Schema::Result::CD;
use base qw/DBIx::Class::Core/;
__PACKAGE__->load_components(qw/InflateColumn::DateTime/); # for example
__PACKAGE__->table('cd');
# Elsewhere in your code:
my $schema1 = Library::Schema->connect(
$dsn,
$user,
$password,
{ AutoCommit => 1 },
);
my $schema2 = Library::Schema->connect($coderef_returning_dbh);
# fetch objects using Library::Schema::Result::DVD
my $resultset = $schema1->resultset('DVD')->search( ... );
my @dvd_objects = $schema2->resultset('DVD')->search( ... );
=head1 DESCRIPTION
Creates database classes based on a schema. This is the recommended way to
use L<DBIx::Class> and allows you to use more than one concurrent connection
with your classes.
NB: If you're used to L<Class::DBI> it's worth reading the L</SYNOPSIS>
carefully, as DBIx::Class does things a little differently. Note in
particular which module inherits off which.
=head1 SETUP METHODS
=head2 load_namespaces
=over 4
=item Arguments: %options?
=back
package MyApp::Schema;
__PACKAGE__->load_namespaces();
__PACKAGE__->load_namespaces(
result_namespace => 'Res',
resultset_namespace => 'RSet',
default_resultset_class => '+MyApp::Othernamespace::RSet',
);
With no arguments, this method uses L<Module::Find> to load all of the
Result and ResultSet classes under the namespace of the schema from
which it is called. For example, C<My::Schema> will by default find
and load Result classes named C<My::Schema::Result::*> and ResultSet
classes named C<My::Schema::ResultSet::*>.
ResultSet classes are associated with Result class of the same name.
For example, C<My::Schema::Result::CD> will get the ResultSet class
C<My::Schema::ResultSet::CD> if it is present.
Both Result and ResultSet namespaces are configurable via the
C<result_namespace> and C<resultset_namespace> options.
Another option, C<default_resultset_class> specifies a custom default
ResultSet class for Result classes with no corresponding ResultSet.
All of the namespace and classname options are by default relative to
the schema classname. To specify a fully-qualified name, prefix it
with a literal C<+>. For example, C<+Other::NameSpace::Result>.
=head3 Warnings
You will be warned if ResultSet classes are discovered for which there
are no matching Result classes like this:
load_namespaces found ResultSet class $classname with no corresponding Result class
If a ResultSource instance is found to already have a ResultSet class set
using L<resultset_class|DBIx::Class::ResultSource/resultset_class> to some
other class, you will be warned like this:
We found ResultSet class '$rs_class' for '$result_class', but it seems
that you had already set '$result_class' to use '$rs_set' instead
=head3 Examples
# load My::Schema::Result::CD, My::Schema::Result::Artist,
# My::Schema::ResultSet::CD, etc...
My::Schema->load_namespaces;
# Override everything to use ugly names.
# In this example, if there is a My::Schema::Res::Foo, but no matching
# My::Schema::RSets::Foo, then Foo will have its
# resultset_class set to My::Schema::RSetBase
My::Schema->load_namespaces(
result_namespace => 'Res',
resultset_namespace => 'RSets',
default_resultset_class => 'RSetBase',
);
# Put things in other namespaces
My::Schema->load_namespaces(
result_namespace => '+Some::Place::Results',
resultset_namespace => '+Another::Place::RSets',
);
To search multiple namespaces for either Result or ResultSet classes,
use an arrayref of namespaces for that option. In the case that the
same result (or resultset) class exists in multiple namespaces, later
entries in the list of namespaces will override earlier ones.
My::Schema->load_namespaces(
# My::Schema::Results_C::Foo takes precedence over My::Schema::Results_B::Foo :
result_namespace => [ 'Results_A', 'Results_B', 'Results_C' ],
resultset_namespace => [ '+Some::Place::RSets', 'RSets' ],
);
=cut
# Pre-pends our classname to the given relative classname or
# class namespace, unless there is a '+' prefix, which will
# be stripped.
sub _expand_relative_name {
my ($class, $name) = @_;
$name =~ s/^\+// or $name = "${class}::${name}";
return $name;
}
# Finds all modules in the supplied namespace, or if omitted in the
# namespace of $class. Untaints all findings as they can be assumed
# to be safe
sub _findallmod {
require Module::Find;
return map
{ $_ =~ /(.+)/ } # untaint result
Module::Find::findallmod( $_[1] || ref $_[0] || $_[0] )
;
}
# returns a hash of $shortname => $fullname for every package
# found in the given namespaces ($shortname is with the $fullname's
# namespace stripped off)
sub _map_namespaces {
my ($me, $namespaces) = @_;
my %res;
for my $ns (@$namespaces) {
$res{ substr($_, length "${ns}::") } = $_
for $me->_findallmod($ns);
}
\%res;
}
# returns the result_source_instance for the passed class/object,
# or dies with an informative message (used by load_namespaces)
sub _ns_get_rsrc_instance {
my $me = shift;
my $rs_class = ref ($_[0]) || $_[0];
return try {
$rs_class->result_source_instance
} catch {
$me->throw_exception (
"Attempt to load_namespaces() class $rs_class failed - are you sure this is a real Result Class?: $_"
);
};
}
sub load_namespaces {
my ($class, %args) = @_;
my $result_namespace = delete $args{result_namespace} || 'Result';
my $resultset_namespace = delete $args{resultset_namespace} || 'ResultSet';
my $default_resultset_class = delete $args{default_resultset_class};
$default_resultset_class = $class->_expand_relative_name($default_resultset_class)
if $default_resultset_class;
$class->throw_exception('load_namespaces: unknown option(s): '
. join(q{,}, map { qq{'$_'} } keys %args))
if scalar keys %args;
for my $arg ($result_namespace, $resultset_namespace) {
$arg = [ $arg ] if ( $arg and ! ref $arg );
$class->throw_exception('load_namespaces: namespace arguments must be '
. 'a simple string or an arrayref')
if ref($arg) ne 'ARRAY';
$_ = $class->_expand_relative_name($_) for (@$arg);
}
my $results_by_source_name = $class->_map_namespaces($result_namespace);
my $resultsets_by_source_name = $class->_map_namespaces($resultset_namespace);
my @to_register;
{
no warnings qw/redefine/;
local *Class::C3::reinitialize = sub { } if DBIx::Class::_ENV_::OLD_MRO;
use warnings qw/redefine/;
# ensure classes are loaded and attached in inheritance order
for my $result_class (values %$results_by_source_name) {
$class->ensure_class_loaded($result_class);
}
my %inh_idx;
my @source_names_by_subclass_last = sort {
($inh_idx{$a} ||=
scalar @{mro::get_linear_isa( $results_by_source_name->{$a} )}
)
<=>
($inh_idx{$b} ||=
scalar @{mro::get_linear_isa( $results_by_source_name->{$b} )}
)
} keys(%$results_by_source_name);
foreach my $source_name (@source_names_by_subclass_last) {
my $result_class = $results_by_source_name->{$source_name};
my $preset_resultset_class = $class->_ns_get_rsrc_instance ($result_class)->resultset_class;
my $found_resultset_class = delete $resultsets_by_source_name->{$source_name};
if($preset_resultset_class && $preset_resultset_class ne 'DBIx::Class::ResultSet') {
if($found_resultset_class && $found_resultset_class ne $preset_resultset_class) {
carp "We found ResultSet class '$found_resultset_class' matching '$results_by_source_name->{$source_name}', but it seems "
. "that you had already set the '$results_by_source_name->{$source_name}' resultet to '$preset_resultset_class' instead";
}
}
# elsif - there may be *no* default_resultset_class, in which case we fallback to
# DBIx::Class::Resultset and there is nothing to check
elsif($found_resultset_class ||= $default_resultset_class) {
$class->ensure_class_loaded($found_resultset_class);
if(!$found_resultset_class->isa("DBIx::Class::ResultSet")) {
carp "load_namespaces found ResultSet class '$found_resultset_class' that does not subclass DBIx::Class::ResultSet";
}
$class->_ns_get_rsrc_instance ($result_class)->resultset_class($found_resultset_class);
}
my $source_name = $class->_ns_get_rsrc_instance ($result_class)->source_name || $source_name;
push(@to_register, [ $source_name, $result_class ]);
}
}
foreach (sort keys %$resultsets_by_source_name) {
carp "load_namespaces found ResultSet class '$resultsets_by_source_name->{$_}' "
.'with no corresponding Result class';
}
Class::C3->reinitialize if DBIx::Class::_ENV_::OLD_MRO;
$class->register_class(@$_) for (@to_register);
return;
}
=head2 load_classes
=over 4
=item Arguments: @classes?, { $namespace => [ @classes ] }+
=back
L</load_classes> is an alternative method to L</load_namespaces>, both of
which serve similar purposes, each with different advantages and disadvantages.
In the general case you should use L</load_namespaces>, unless you need to
be able to specify that only specific classes are loaded at runtime.
With no arguments, this method uses L<Module::Find> to find all classes under
the schema's namespace. Otherwise, this method loads the classes you specify
(using L<use>), and registers them (using L</"register_class">).
It is possible to comment out classes with a leading C<#>, but note that perl
will think it's a mistake (trying to use a comment in a qw list), so you'll
need to add C<no warnings 'qw';> before your load_classes call.
If any classes found do not appear to be Result class files, you will
get the following warning:
Failed to load $comp_class. Can't find source_name method. Is
$comp_class really a full DBIC result class? Fix it, move it elsewhere,
or make your load_classes call more specific.
Example:
My::Schema->load_classes(); # loads My::Schema::CD, My::Schema::Artist,
# etc. (anything under the My::Schema namespace)
# loads My::Schema::CD, My::Schema::Artist, Other::Namespace::Producer but
# not Other::Namespace::LinerNotes nor My::Schema::Track
My::Schema->load_classes(qw/ CD Artist #Track /, {
Other::Namespace => [qw/ Producer #LinerNotes /],
});
=cut
sub load_classes {
my ($class, @params) = @_;
my %comps_for;
if (@params) {
foreach my $param (@params) {
if (ref $param eq 'ARRAY') {
# filter out commented entries
my @modules = grep { $_ !~ /^#/ } @$param;
push (@{$comps_for{$class}}, @modules);
}
elsif (ref $param eq 'HASH') {
# more than one namespace possible
for my $comp ( keys %$param ) {
# filter out commented entries
my @modules = grep { $_ !~ /^#/ } @{$param->{$comp}};
push (@{$comps_for{$comp}}, @modules);
}
}
else {
# filter out commented entries
push (@{$comps_for{$class}}, $param) if $param !~ /^#/;
}
}
} else {
my @comp = map { substr $_, length "${class}::" }
$class->_findallmod($class);
$comps_for{$class} = \@comp;
}
my @to_register;
{
no warnings qw/redefine/;
local *Class::C3::reinitialize = sub { } if DBIx::Class::_ENV_::OLD_MRO;
use warnings qw/redefine/;
foreach my $prefix (keys %comps_for) {
foreach my $comp (@{$comps_for{$prefix}||[]}) {
my $comp_class = "${prefix}::${comp}";
$class->ensure_class_loaded($comp_class);
my $snsub = $comp_class->can('source_name');
if(! $snsub ) {
carp "Failed to load $comp_class. Can't find source_name method. Is $comp_class really a full DBIC result class? Fix it, move it elsewhere, or make your load_classes call more specific.";
next;
}
$comp = $snsub->($comp_class) || $comp;
push(@to_register, [ $comp, $comp_class ]);
}
}
}
Class::C3->reinitialize if DBIx::Class::_ENV_::OLD_MRO;
foreach my $to (@to_register) {
$class->register_class(@$to);
}
}
=head2 storage_type
=over 4
=item Arguments: $storage_type|{$storage_type, \%args}
=item Return Value: $storage_type|{$storage_type, \%args}
=item Default value: DBIx::Class::Storage::DBI
=back
Set the storage class that will be instantiated when L</connect> is called.
If the classname starts with C<::>, the prefix C<DBIx::Class::Storage> is
assumed by L</connect>.
You want to use this to set subclasses of L<DBIx::Class::Storage::DBI>
in cases where the appropriate subclass is not autodetected.
If your storage type requires instantiation arguments, those are
defined as a second argument in the form of a hashref and the entire
value needs to be wrapped into an arrayref or a hashref. We support
both types of refs here in order to play nice with your
Config::[class] or your choice. See
L<DBIx::Class::Storage::DBI::Replicated> for an example of this.
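For example, a minimal sketch (the storage class and the C<balancer_type>
argument are only illustrative here; check the documentation of the storage
class you actually use for its supported constructor arguments):
  # plain storage class; the leading '::' expands to DBIx::Class::Storage
  __PACKAGE__->storage_type( '::DBI::Replicated' );
  # with instantiation arguments, wrapped in an arrayref
  __PACKAGE__->storage_type(
    [ '::DBI::Replicated', { balancer_type => '::Random' } ]
  );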
=head2 exception_action
=over 4
=item Arguments: $code_reference
=item Return Value: $code_reference
=item Default value: None
=back
When L</throw_exception> is invoked and L</exception_action> is set to a code
reference, this reference will be called instead of
L<DBIx::Class::Exception/throw>, with the exception message passed as the only
argument.
Your custom throw code B<must> rethrow the exception, as L</throw_exception> is
an integral part of DBIC's internal execution control flow.
Example:
package My::Schema;
use base qw/DBIx::Class::Schema/;
use My::ExceptionClass;
__PACKAGE__->exception_action(sub { My::ExceptionClass->throw(@_) });
__PACKAGE__->load_classes;
# or:
my $schema_obj = My::Schema->connect( .... );
$schema_obj->exception_action(sub { My::ExceptionClass->throw(@_) });
=head2 stacktrace
=over 4
=item Arguments: boolean
=back
Whether L</throw_exception> should include stack trace information.
Defaults to false normally, but defaults to true if C<$ENV{DBIC_TRACE}>
is true.
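To force stack traces on unconditionally, set it in your Schema class
(illustrative):
    __PACKAGE__->stacktrace(1);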
=head2 sqlt_deploy_hook
=over
=item Arguments: $sqlt_schema
=back
An optional sub which you can declare in your own Schema class that will get
passed the L<SQL::Translator::Schema> object when you deploy the schema via
L</create_ddl_dir> or L</deploy>.
For an example of what you can do with this, see
L<DBIx::Class::Manual::Cookbook/Adding Indexes And Functions To Your SQL>.
Note that sqlt_deploy_hook is called by L</deployment_statements>, which in turn
is called before L</deploy>. Therefore the hook can be used only to manipulate
the L<SQL::Translator::Schema> object before it is turned into SQL fed to the
database. If you want to execute post-deploy statements which can not be generated
by L<SQL::Translator>, the currently suggested method is to overload L</deploy>
and use L<dbh_do|DBIx::Class::Storage::DBI/dbh_do>.
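A minimal sketch of a schema-level hook (the table, column and index names are
purely illustrative):
    sub sqlt_deploy_hook {
      my ($self, $sqlt_schema) = @_;
      my $table = $sqlt_schema->get_table('artist')
        or return;
      $table->add_index(name => 'artist_name_idx', fields => ['name']);
    }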
=head1 METHODS
=head2 connect
=over 4
=item Arguments: @connectinfo
=item Return Value: $new_schema
=back
Creates and returns a new Schema object. The connection info set on it
is used to create a new instance of the storage backend and set it on
the Schema object.
See L<DBIx::Class::Storage::DBI/"connect_info"> for DBI-specific
syntax on the C<@connectinfo> argument, or L<DBIx::Class::Storage> in
general.
Note that C<connect_info> expects an arrayref of arguments, but
C<connect> does not. C<connect> wraps its arguments in an arrayref
before passing them to C<connect_info>.
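For example (the DSN, credentials and attributes are illustrative):
    my $schema = My::Schema->connect(
      'dbi:Pg:dbname=myapp', 'dbuser', 'dbpass',
      { AutoCommit => 1, RaiseError => 1 },
    );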
=head3 Overloading
C<connect> is a convenience method. It is equivalent to calling
$schema->clone->connection(@connectinfo). To write your own overloaded
version, overload L</connection> instead.
=cut
sub connect { shift->clone->connection(@_) }
=head2 resultset
=over 4
=item Arguments: L<$source_name|DBIx::Class::ResultSource/source_name>
=item Return Value: L<$resultset|DBIx::Class::ResultSet>
=back
my $rs = $schema->resultset('DVD');
Returns the L<DBIx::Class::ResultSet> object for the registered source
name.
=cut
sub resultset {
my ($self, $source_name) = @_;
$self->throw_exception('resultset() expects a source name')
unless defined $source_name;
return $self->source($source_name)->resultset;
}
=head2 sources
=over 4
=item Return Value: L<@source_names|DBIx::Class::ResultSource/source_name>
=back
my @source_names = $schema->sources;
Lists names of all the sources registered on this Schema object.
=cut
sub sources { keys %{shift->source_registrations} }
=head2 source
=over 4
=item Arguments: L<$source_name|DBIx::Class::ResultSource/source_name>
=item Return Value: L<$result_source|DBIx::Class::ResultSource>
=back
my $source = $schema->source('Book');
Returns the L<DBIx::Class::ResultSource> object for the registered
source name.
=cut
sub source {
my $self = shift;
$self->throw_exception("source() expects a source name")
unless @_;
my $source_name = shift;
my $sreg = $self->source_registrations;
return $sreg->{$source_name} if exists $sreg->{$source_name};
# if we got here, they probably passed a full class name
my $mapped = $self->class_mappings->{$source_name};
$self->throw_exception("Can't find source for ${source_name}")
unless $mapped && exists $sreg->{$mapped};
return $sreg->{$mapped};
}
=head2 class
=over 4
=item Arguments: L<$source_name|DBIx::Class::ResultSource/source_name>
=item Return Value: $classname
=back
my $class = $schema->class('CD');
Retrieves the Result class name for the given source name.
=cut
sub class {
return shift->source(shift)->result_class;
}
=head2 txn_do
=over 4
=item Arguments: C<$coderef>, @coderef_args?
=item Return Value: The return value of $coderef
=back
Executes C<$coderef> with (optional) arguments C<@coderef_args> atomically,
returning its result (if any). Equivalent to calling $schema->storage->txn_do.
See L<DBIx::Class::Storage/"txn_do"> for more information.
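A short sketch (the result source, relationship and column names are
illustrative):
    my $artist = $schema->txn_do(sub {
      # both inserts commit together, or not at all
      my $artist = $schema->resultset('Artist')->create({ name => 'Some Band' });
      $artist->create_related('cds', { title => 'First CD', year => 2001 });
      return $artist;
    });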
This interface is preferred over using the individual methods L</txn_begin>,
L</txn_commit>, and L</txn_rollback> below.
WARNING: If you are connected with C<< AutoCommit => 0 >> the transaction is
considered nested, and you will still need to call L</txn_commit> to write your
changes when appropriate. You will also want to connect with C<< auto_savepoint =>
1 >> to get partial rollback to work, if the storage driver for your database
supports it.
Connecting with C<< AutoCommit => 1 >> is recommended.
=cut
sub txn_do {
my $self = shift;
$self->storage or $self->throw_exception
('txn_do called on $schema without storage');
$self->storage->txn_do(@_);
}
=head2 txn_scope_guard
Runs C<txn_scope_guard> on the schema's storage. See
L<DBIx::Class::Storage/txn_scope_guard>.
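A typical use looks like this (resultset and column names are illustrative):
    my $guard = $schema->txn_scope_guard;
    $schema->resultset('Artist')->create({ name => 'Some Band' });
    $guard->commit;  # without this the transaction rolls back when $guard is destroyed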
=cut
sub txn_scope_guard {
my $self = shift;
$self->storage or $self->throw_exception
('txn_scope_guard called on $schema without storage');
$self->storage->txn_scope_guard(@_);
}
=head2 txn_begin
Begins a transaction (does nothing if AutoCommit is off). Equivalent to
calling $schema->storage->txn_begin. See
L<DBIx::Class::Storage/"txn_begin"> for more information.
=cut
sub txn_begin {
my $self = shift;
$self->storage or $self->throw_exception
('txn_begin called on $schema without storage');
$self->storage->txn_begin;
}
=head2 txn_commit
Commits the current transaction. Equivalent to calling
$schema->storage->txn_commit. See L<DBIx::Class::Storage/"txn_commit">
for more information.
=cut
sub txn_commit {
my $self = shift;
$self->storage or $self->throw_exception
('txn_commit called on $schema without storage');
$self->storage->txn_commit;
}
=head2 txn_rollback
Rolls back the current transaction. Equivalent to calling
$schema->storage->txn_rollback. See
L<DBIx::Class::Storage/"txn_rollback"> for more information.
=cut
sub txn_rollback {
my $self = shift;
$self->storage or $self->throw_exception
('txn_rollback called on $schema without storage');
$self->storage->txn_rollback;
}
=head2 storage
my $storage = $schema->storage;
Returns the L<DBIx::Class::Storage> object for this Schema. Grab this
if you want to turn on SQL statement debugging at runtime, or set the
quote character. For the default storage, the documentation can be
found in L<DBIx::Class::Storage::DBI>.
=head2 populate
=over 4
=item Arguments: L<$source_name|DBIx::Class::ResultSource/source_name>, [ \@column_list, \@row_values+ ] | [ \%col_data+ ]
=item Return Value: L<\@result_objects|DBIx::Class::Manual::ResultClass> (scalar context) | L<@result_objects|DBIx::Class::Manual::ResultClass> (list context)
=back
A convenience shortcut to L<DBIx::Class::ResultSet/populate>. Equivalent to:
$schema->resultset($source_name)->populate([...]);
=over 4
=item NOTE
The context of this method call has an important effect on what is
submitted to storage. In void context data is fed directly to fastpath
insertion routines provided by the underlying storage (most often
L<DBI/execute_for_fetch>), bypassing the L<new|DBIx::Class::Row/new> and
L<insert|DBIx::Class::Row/insert> calls on the
L<Result|DBIx::Class::Manual::ResultClass> class, including any
augmentation of these methods provided by components. For example if you
are using something like L<DBIx::Class::UUIDColumns> to create primary
keys for you, you will find that your PKs are empty. In this case you
will have to explicitly force scalar or list context in order to create
those values.
=back
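A small sketch of the arrayref-of-arrayrefs form, where the first arrayref
names the columns (source and column names are illustrative):
    # called in void context, so the fastpath insertion routines are used
    $schema->populate('Artist', [
      [ qw(artistid name) ],
      [ 1, 'A Band'       ],
      [ 2, 'Another Band' ],
    ]);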
=cut
sub populate {
my ($self, $name, $data) = @_;
my $rs = $self->resultset($name)
or $self->throw_exception("'$name' is not a resultset");
return $rs->populate($data);
}
=head2 connection
=over 4
=item Arguments: @args
=item Return Value: $new_schema
=back
Similar to L</connect> except sets the storage object and connection
data in-place on the Schema class. You should probably be calling
L</connect> to get a proper Schema object instead.
=head3 Overloading
Overload C<connection> to change the behaviour of C<connect>.
=cut
sub connection {
my ($self, @info) = @_;
return $self if !@info && $self->storage;
my ($storage_class, $args) = ref $self->storage_type
? $self->_normalize_storage_type($self->storage_type)
: $self->storage_type
;
$storage_class =~ s/^::/DBIx::Class::Storage::/;
try {
$self->ensure_class_loaded ($storage_class);
}
catch {
$self->throw_exception(
"Unable to load storage class ${storage_class}: $_"
);
};
my $storage = $storage_class->new( $self => $args||{} );
$storage->connect_info(\@info);
$self->storage($storage);
return $self;
}
sub _normalize_storage_type {
my ($self, $storage_type) = @_;
if(ref $storage_type eq 'ARRAY') {
return @$storage_type;
} elsif(ref $storage_type eq 'HASH') {
return %$storage_type;
} else {
$self->throw_exception('Unsupported REFTYPE given: '. ref $storage_type);
}
}
=head2 compose_namespace
=over 4
=item Arguments: $target_namespace, $additional_base_class?
=item Return Value: $new_schema
=back
For each L<DBIx::Class::ResultSource> in the schema, this method creates a
class in the target namespace (e.g. $target_namespace::CD,
$target_namespace::Artist) that inherits from the corresponding classes
attached to the current schema.
It also attaches a corresponding L<DBIx::Class::ResultSource> object to the
new $schema object. If C<$additional_base_class> is given, the new composed
classes will inherit from first the corresponding class from the current
schema then the base class.
For example, for a schema with My::Schema::CD and My::Schema::Artist classes,
$schema->compose_namespace('My::DB', 'Base::Class');
print join (', ', @My::DB::CD::ISA) . "\n";
print join (', ', @My::DB::Artist::ISA) ."\n";
will produce the output
My::Schema::CD, Base::Class
My::Schema::Artist, Base::Class
=cut
# this might be oversimplified
# sub compose_namespace {
# my ($self, $target, $base) = @_;
# my $schema = $self->clone;
# foreach my $source_name ($schema->sources) {
# my $source = $schema->source($source_name);
# my $target_class = "${target}::${source_name}";
# $self->inject_base(
# $target_class => $source->result_class, ($base ? $base : ())
# );
# $source->result_class($target_class);
# $target_class->result_source_instance($source)
# if $target_class->can('result_source_instance');
# $schema->register_source($source_name, $source);
# }
# return $schema;
# }
sub compose_namespace {
my ($self, $target, $base) = @_;
my $schema = $self->clone;
$schema->source_registrations({});
# the original class-mappings must remain - otherwise
# reverse_relationship_info will not work
#$schema->class_mappings({});
{
no warnings qw/redefine/;
local *Class::C3::reinitialize = sub { } if DBIx::Class::_ENV_::OLD_MRO;
use warnings qw/redefine/;
foreach my $source_name ($self->sources) {
my $orig_source = $self->source($source_name);
my $target_class = "${target}::${source_name}";
$self->inject_base($target_class, $orig_source->result_class, ($base || ()) );
# register_source examines result_class, and then returns us a clone
my $new_source = $schema->register_source($source_name, bless
{ %$orig_source, result_class => $target_class },
ref $orig_source,
);
if ($target_class->can('result_source_instance')) {
# give the class a schema-less source copy
$target_class->result_source_instance( bless
{ %$new_source, schema => ref $new_source->{schema} || $new_source->{schema} },
ref $new_source,
);
}
}
quote_sub "${target}::${_}" => "shift->schema->$_(\@_)"
for qw(class source resultset);
}
Class::C3->reinitialize() if DBIx::Class::_ENV_::OLD_MRO;
return $schema;
}
sub setup_connection_class {
my ($class, $target, @info) = @_;
$class->inject_base($target => 'DBIx::Class::DB');
#$target->load_components('DB');
$target->connection(@info);
}
=head2 svp_begin
Creates a new savepoint (does nothing outside a transaction).
Equivalent to calling $schema->storage->svp_begin. See
L<DBIx::Class::Storage/"svp_begin"> for more information.
=cut
sub svp_begin {
my ($self, $name) = @_;
$self->storage or $self->throw_exception
('svp_begin called on $schema without storage');
$self->storage->svp_begin($name);
}
=head2 svp_release
Releases a savepoint (does nothing outside a transaction).
Equivalent to calling $schema->storage->svp_release. See
L<DBIx::Class::Storage/"svp_release"> for more information.
=cut
sub svp_release {
my ($self, $name) = @_;
$self->storage or $self->throw_exception
('svp_release called on $schema without storage');
$self->storage->svp_release($name);
}
=head2 svp_rollback
Rollback to a savepoint (does nothing outside a transaction).
Equivalent to calling $schema->storage->svp_rollback. See
L<DBIx::Class::Storage/"svp_rollback"> for more information.
=cut
sub svp_rollback {
my ($self, $name) = @_;
$self->storage or $self->throw_exception
('svp_rollback called on $schema without storage');
$self->storage->svp_rollback($name);
}
=head2 clone
=over 4
=item Arguments: %attrs?
=item Return Value: $new_schema
=back
Clones the schema and its associated result_source objects and returns the
copy. The resulting copy will have the same attributes as the source schema,
except for those attributes explicitly overridden by the provided C<%attrs>.
=cut
sub clone {
my $self = shift;
my $clone = {
(ref $self ? %$self : ()),
(@_ == 1 && ref $_[0] eq 'HASH' ? %{ $_[0] } : @_),
};
bless $clone, (ref $self || $self);
$clone->$_(undef) for qw/class_mappings source_registrations storage/;
$clone->_copy_state_from($self);
return $clone;
}
# Needed in Schema::Loader - if you refactor, please make a compatibility shim
# -- Caelum
sub _copy_state_from {
my ($self, $from) = @_;
$self->class_mappings({ %{$from->class_mappings} });
$self->source_registrations({ %{$from->source_registrations} });
foreach my $source_name ($from->sources) {
my $source = $from->source($source_name);
my $new = $source->new($source);
# we use extra here as we want to leave the class_mappings as they are
# but overwrite the source_registrations entry with the new source
$self->register_extra_source($source_name => $new);
}
if ($from->storage) {
$self->storage($from->storage);
$self->storage->set_schema($self);
}
}
=head2 throw_exception
=over 4
=item Arguments: $message
=back
Throws an exception. Obeys the exemption rules of L<DBIx::Class::Carp> to report
errors from the outer user's perspective. See L</exception_action> for details on overriding
this method's behavior. If L</stacktrace> is turned on, C<throw_exception>'s
default behavior will provide a detailed stack trace.
=cut
sub throw_exception {
my $self = shift;
if (my $act = $self->exception_action) {
if ($act->(@_)) {
DBIx::Class::Exception->throw(
"Invocation of the exception_action handler installed on $self did *not*"
.' result in an exception. DBIx::Class is unable to function without a reliable'
.' exception mechanism, ensure that exception_action does not hide exceptions'
." (original error: $_[0])"
);
}
carp_unique (
"The exception_action handler installed on $self returned false instead"
.' of throwing an exception. This behavior has been deprecated, adjust your'
.' handler to always rethrow the supplied error.'
);
}
DBIx::Class::Exception->throw($_[0], $self->stacktrace);
}
=head2 deploy
=over 4
=item Arguments: \%sqlt_args, $dir
=back
Attempts to deploy the schema to the current storage using L<SQL::Translator>.
See L<SQL::Translator/METHODS> for a list of values for C<\%sqlt_args>.
The most common value for this would be C<< { add_drop_table => 1 } >>
to have the SQL produced include a C<DROP TABLE> statement for each table
created. For quoting purposes supply C<quote_identifiers>.
Additionally, the DBIx::Class parser accepts a C<sources> parameter as a hash
ref or an array ref, containing a list of sources to deploy. If present, then
only the sources listed will get deployed. Furthermore, you can use the
C<add_fk_index> parser parameter to prevent the parser from creating an index for each
FK.
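For example (source names are illustrative):
    $schema->deploy({
      add_drop_table    => 1,
      quote_identifiers => 1,
      sources           => [qw( Artist CD )],  # deploy only these sources
      add_fk_index      => 0,                  # skip the automatic FK indexes
    });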
=cut
sub deploy {
my ($self, $sqltargs, $dir) = @_;
$self->throw_exception("Can't deploy without storage") unless $self->storage;
$self->storage->deploy($self, undef, $sqltargs, $dir);
}
=head2 deployment_statements
=over 4
=item Arguments: See L<DBIx::Class::Storage::DBI/deployment_statements>
=item Return Value: $listofstatements
=back
A convenient shortcut to
C<< $self->storage->deployment_statements($self, @args) >>.
Returns the statements used by L</deploy> and
L<DBIx::Class::Storage/deploy>.
=cut
sub deployment_statements {
my $self = shift;
$self->throw_exception("Can't generate deployment statements without a storage")
if not $self->storage;
$self->storage->deployment_statements($self, @_);
}
=head2 create_ddl_dir
=over 4
=item Arguments: See L<DBIx::Class::Storage::DBI/create_ddl_dir>
=back
A convenient shortcut to
C<< $self->storage->create_ddl_dir($self, @args) >>.
Creates an SQL file based on the Schema, for each of the specified
database types, in the given directory.
=cut
sub create_ddl_dir {
my $self = shift;
$self->throw_exception("Can't create_ddl_dir without storage") unless $self->storage;
$self->storage->create_ddl_dir($self, @_);
}
=head2 ddl_filename
=over 4
=item Arguments: $database-type, $version, $directory, $preversion
=item Return Value: $normalised_filename
=back
my $filename = $table->ddl_filename($type, $version, $dir, $preversion)
This method is called by C<create_ddl_dir> to compose a file name out of
the supplied directory, database type and version number. The default file
name format is: C<$dir$schema-$version-$type.sql>.
You may override this method in your schema if you wish to use a different
format.
WARNING
Prior to DBIx::Class version 0.08100 this method had a different signature:
my $filename = $table->ddl_filename($type, $dir, $version, $preversion)
In recent versions variables $dir and $version were reversed in order to
bring the signature in line with other Schema/Storage methods. If you
really need to maintain backward compatibility, you can do the following
in any overriding methods:
($dir, $version) = ($version, $dir) if ($DBIx::Class::VERSION < 0.08100);
=cut
sub ddl_filename {
my ($self, $type, $version, $dir, $preversion) = @_;
require File::Spec;
$version = "$preversion-$version" if $preversion;
my $class = blessed($self) || $self;
$class =~ s/::/-/g;
return File::Spec->catfile($dir, "$class-$version-$type.sql");
}
=head2 thaw
Provided as the recommended way of thawing schema objects. You can call
C<Storable::thaw> directly if you wish, but the thawed objects will not have a
reference to any schema, so are rather useless.
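A sketch of the round trip together with L</freeze>:
    my $frozen = $schema->freeze($result_object);
    # ...store or transmit $frozen...
    my $result_again = $schema->thaw($frozen);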
=cut
sub thaw {
my ($self, $obj) = @_;
local $DBIx::Class::ResultSourceHandle::thaw_schema = $self;
return Storable::thaw($obj);
}
=head2 freeze
This doesn't actually do anything beyond calling L<nfreeze|Storable/SYNOPSIS>,
it is just provided here for symmetry.
=cut
sub freeze {
return Storable::nfreeze($_[1]);
}
=head2 dclone
=over 4
=item Arguments: $object
=item Return Value: dcloned $object
=back
Recommended way of dcloning L<DBIx::Class::Row> and L<DBIx::Class::ResultSet>
objects so their references to the schema object
(which itself is B<not> cloned) are properly maintained.
=cut
sub dclone {
my ($self, $obj) = @_;
local $DBIx::Class::ResultSourceHandle::thaw_schema = $self;
return Storable::dclone($obj);
}
=head2 schema_version
Returns the current schema class' $VERSION in a normalised way.
=cut
sub schema_version {
my ($self) = @_;
my $class = ref($self)||$self;
# does -not- use $schema->VERSION
# since that varies in results depending on if version.pm is installed, and if
# so the perl or XS versions. If you want this to change, bug the version.pm
# author to make vpp and vxs behave the same.
my $version;
{
no strict 'refs';
$version = ${"${class}::VERSION"};
}
return $version;
}
=head2 register_class
=over 4
=item Arguments: $source_name, $component_class
=back
This method is called by L</load_namespaces> and L</load_classes> to install the found classes into your Schema. You should be using those instead of this one.
You will only need this method if you have your Result classes in
files which are not named after the packages (or all in the same
file). You may also need it to register classes at runtime.
Registers a class which isa DBIx::Class::ResultSourceProxy. Equivalent to
calling:
$schema->register_source($source_name, $component_class->result_source_instance);
=cut
sub register_class {
my ($self, $source_name, $to_register) = @_;
$self->register_source($source_name => $to_register->result_source_instance);
}
=head2 register_source
=over 4
=item Arguments: $source_name, L<$result_source|DBIx::Class::ResultSource>
=back
This method is called by L</register_class>.
Registers the L<DBIx::Class::ResultSource> in the schema with the given
source name.
=cut
sub register_source { shift->_register_source(@_) }
=head2 unregister_source
=over 4
=item Arguments: $source_name
=back
Removes the L<DBIx::Class::ResultSource> from the schema for the given source name.
=cut
sub unregister_source { shift->_unregister_source(@_) }
=head2 register_extra_source
=over 4
=item Arguments: $source_name, L<$result_source|DBIx::Class::ResultSource>
=back
As L</register_source> but should be used if the result class already
has a source and you want to register an extra one.
=cut
sub register_extra_source { shift->_register_source(@_, { extra => 1 }) }
sub _register_source {
my ($self, $source_name, $source, $params) = @_;
$source = $source->new({ %$source, source_name => $source_name });
$source->schema($self);
weaken $source->{schema} if ref($self);
my %reg = %{$self->source_registrations};
$reg{$source_name} = $source;
$self->source_registrations(\%reg);
return $source if $params->{extra};
my $rs_class = $source->result_class;
if ($rs_class and my $rsrc = try { $rs_class->result_source_instance } ) {
my %map = %{$self->class_mappings};
if (
exists $map{$rs_class}
and
$map{$rs_class} ne $source_name
and
$rsrc ne $_[2] # orig_source
) {
carp
"$rs_class already had a registered source which was replaced by this call. "
. 'Perhaps you wanted register_extra_source(), though it is more likely you did '
. 'something wrong.'
;
}
$map{$rs_class} = $source_name;
$self->class_mappings(\%map);
}
return $source;
}
my $global_phase_destroy;
sub DESTROY {
### NO detected_reinvoked_destructor check
### This code very much relies on being called multiple times
return if $global_phase_destroy ||= in_global_destruction;
my $self = shift;
my $srcs = $self->source_registrations;
for my $source_name (keys %$srcs) {
# find first source that is not about to be GCed (someone other than $self
# holds a reference to it) and reattach to it, weakening our own link
#
# during global destruction (if we have not yet bailed out) this should throw
# which will serve as a signal to not try doing anything else
# however beware - on older perls the exception seems randomly untrappable
# due to some weird race condition during thread joining :(((
if (length ref $srcs->{$source_name} and refcount($srcs->{$source_name}) > 1) {
local $@;
eval {
$srcs->{$source_name}->schema($self);
weaken $srcs->{$source_name};
1;
} or do {
$global_phase_destroy = 1;
};
last;
}
}
}
sub _unregister_source {
my ($self, $source_name) = @_;
my %reg = %{$self->source_registrations};
my $source = delete $reg{$source_name};
$self->source_registrations(\%reg);
if ($source->result_class) {
my %map = %{$self->class_mappings};
delete $map{$source->result_class};
$self->class_mappings(\%map);
}
}
=head2 compose_connection (DEPRECATED)
=over 4
=item Arguments: $target_namespace, @db_info
=item Return Value: $new_schema
=back
DEPRECATED. You probably wanted compose_namespace.
Actually, you probably just wanted to call connect.
=begin hidden
(hidden due to deprecation)
Calls L<DBIx::Class::Schema/"compose_namespace"> to the target namespace,
calls L<DBIx::Class::Schema/connection> with @db_info on the new schema,
then injects the L<DBIx::Class::ResultSetProxy> component and a
resultset_instance classdata entry on all the new classes, in order to support
$target_namespace::$class->search(...) method calls.
This is primarily useful when you have a specific need for class method access
to a connection. In normal usage it is preferred to call
L<DBIx::Class::Schema/connect> and use the resulting schema object to operate
on L<DBIx::Class::ResultSet> objects; see L<DBIx::Class::Schema/resultset> for
more information.
=end hidden
=cut
sub compose_connection {
my ($self, $target, @info) = @_;
carp_once "compose_connection deprecated as of 0.08000"
unless $INC{"DBIx/Class/CDBICompat.pm"};
my $base = 'DBIx::Class::ResultSetProxy';
try {
eval "require ${base};"
}
catch {
$self->throw_exception
("No arguments to load_classes and couldn't load ${base} ($_)")
};
if ($self eq $target) {
# Pathological case, largely caused by the docs on early C::M::DBIC::Plain
foreach my $source_name ($self->sources) {
my $source = $self->source($source_name);
my $class = $source->result_class;
$self->inject_base($class, $base);
$class->mk_classdata(resultset_instance => $source->resultset);
$class->mk_classdata(class_resolver => $self);
}
$self->connection(@info);
return $self;
}
my $schema = $self->compose_namespace($target, $base);
quote_sub "${target}::schema", '$s', { '$s' => \$schema };
$schema->connection(@info);
foreach my $source_name ($schema->sources) {
my $source = $schema->source($source_name);
my $class = $source->result_class;
#warn "$source_name $class $source ".$source->storage;
$class->mk_classdata(result_source_instance => $source);
$class->mk_classdata(resultset_instance => $source->resultset);
$class->mk_classdata(class_resolver => $schema);
}
return $schema;
}
=head1 FURTHER QUESTIONS?
Check the list of L<additional DBIC resources|DBIx::Class/GETTING HELP/SUPPORT>.
=head1 COPYRIGHT AND LICENSE
This module is free software L<copyright|DBIx::Class/COPYRIGHT AND LICENSE>
by the L<DBIx::Class (DBIC) authors|DBIx::Class/AUTHORS>. You can
redistribute it and/or modify it under the same terms as the
L<DBIx::Class library|DBIx::Class/COPYRIGHT AND LICENSE>.
=cut
1;
| 27.625984 | 197 | 0.700561 |
ed88f501fade41e592575bedb1e75d7f077e4836 | 5,019 | pm | Perl | lib/SNMP/Info/Layer7/Netscaler.pm | fragfutter/snmp-info | a82fe1e8766d92b2c379700a3ec400c0ce9c2669 | [
"BSD-3-Clause"
] | null | null | null | lib/SNMP/Info/Layer7/Netscaler.pm | fragfutter/snmp-info | a82fe1e8766d92b2c379700a3ec400c0ce9c2669 | [
"BSD-3-Clause"
] | null | null | null | lib/SNMP/Info/Layer7/Netscaler.pm | fragfutter/snmp-info | a82fe1e8766d92b2c379700a3ec400c0ce9c2669 | [
"BSD-3-Clause"
] | null | null | null | # SNMP::Info::Layer7::Netscaler
#
# Copyright (c) 2012 Eric Miller
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the University of California, Santa Cruz nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
package SNMP::Info::Layer7::Netscaler;
use strict;
use warnings;
use Exporter;
use SNMP::Info::Layer7;
@SNMP::Info::Layer7::Netscaler::ISA = qw/SNMP::Info::Layer7 Exporter/;
@SNMP::Info::Layer7::Netscaler::EXPORT_OK = qw//;
our ($VERSION, %GLOBALS, %MIBS, %FUNCS, %MUNGE);
$VERSION = '3.73';
%MIBS = (
%SNMP::Info::Layer7::MIBS,
'NS-ROOT-MIB' => 'sysBuildVersion',
);
%GLOBALS = (
%SNMP::Info::Layer7::GLOBALS,
'build_ver' => 'sysBuildVersion',
'sys_hw_desc' => 'sysHardwareVersionDesc',
'sys_hw_sn' => 'sysHardwareSerialNumber',
'cpu' => 'resCpuUsage',
);
%FUNCS = (
%SNMP::Info::Layer7::FUNCS,
# IP Address Table - NS-ROOT-MIB::nsIpAddrTable
'ns_ip_index' => 'ipAddr',
'ns_ip_netmask' => 'ipNetmask',
# TODO VLAN - NS-ROOT-MIB::vlanTable
'ns_vid' =>'vlanId',
'ns_vlan_mem' => 'vlanMemberInterfaces',
'ns_vtag_int' => 'vlanTaggedInterfaces',
);
%MUNGE = ( %SNMP::Info::Layer7::MUNGE, );
sub vendor {
return 'citrix';
}
sub os {
return 'netscaler';
}
sub serial {
my $ns = shift;
return $ns->sys_hw_sn() || '';
}
sub model {
my $ns = shift;
my $desc = $ns->sys_hw_desc() || '';
$desc =~ s/^.+\bNS//i;
return $desc;
}
sub os_ver {
my $ns = shift;
my $ver = $ns->build_ver() || '';
if ($ver =~ /^.+\bNS(\d+\.\d+)/) {
$ver = $1;
}
return $ver;
}
sub ip_index {
my $ns = shift;
return $ns->ns_ip_index();
}
sub ip_netmask {
my $ns = shift;
return $ns->ns_ip_netmask();
}
1;
__END__
=head1 NAME
SNMP::Info::Layer7::Netscaler - SNMP Interface to Citrix Netscaler appliances
=head1 AUTHORS
Eric Miller
=head1 SYNOPSIS
# Let SNMP::Info determine the correct subclass for you.
my $ns = new SNMP::Info(
AutoSpecify => 1,
Debug => 1,
DestHost => 'myrouter',
Community => 'public',
Version => 2
)
or die "Can't connect to DestHost.\n";
my $class = $ns->class();
print "SNMP::Info determined this device to fall under subclass : $class\n";
=head1 DESCRIPTION
Subclass for Citrix Netscaler appliances
=head2 Inherited Classes
=over
=item SNMP::Info::Layer7
=back
=head2 Required MIBs
=over
=item F<NS-ROOT-MIB>
=item Inherited Classes' MIBs
See L<SNMP::Info::Layer7> for its own MIB requirements.
=back
=head1 GLOBALS
These are methods that return scalar value from SNMP
=over
=item $ns->vendor()
Returns 'citrix'.
=item $ns->os()
Returns 'netscaler'.
=item $ns->os_ver()
Release extracted from C<sysBuildVersion>.
=item $ns->model()
Model extracted from C<sysHardwareVersionDesc>.
=item $ns->cpu()
C<resCpuUsage>
=item $ns->build_ver()
C<sysBuildVersion>
=item $ns->sys_hw_desc()
C<sysHardwareVersionDesc>
=item $ns->serial()
C<sysHardwareSerialNumber>
=back
=head2 Globals imported from SNMP::Info::Layer7
See documentation in L<SNMP::Info::Layer7> for details.
=head1 TABLE ENTRIES
These are methods that return tables of information in the form of a reference
to a hash.
=over
=item $ns->ip_index()
C<ipAddr>
=item $ns->ip_netmask()
C<ipNetmask>
=back
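For example, the two tables can be walked together to list the configured
addresses (a sketch only):
    my $ips   = $ns->ip_index()   || {};
    my $masks = $ns->ip_netmask() || {};
    foreach my $iid (keys %$ips) {
        printf "%s / %s\n", $ips->{$iid}, $masks->{$iid} || '';
    }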
=head2 Table Methods imported from SNMP::Info::Layer7
See documentation in L<SNMP::Info::Layer7> for details.
=cut
| 21.821739 | 78 | 0.659095 |
73e5316b6676d6a2a4c1afe68e22e2f48989b383 | 2,644 | pm | Perl | scripts/Common.pm | Jinzang/swift-path | 55b6c32c10967fb10dd03f1f52f2b1d956f28d98 | [
"CC-BY-3.0"
] | null | null | null | scripts/Common.pm | Jinzang/swift-path | 55b6c32c10967fb10dd03f1f52f2b1d956f28d98 | [
"CC-BY-3.0"
] | null | null | null | scripts/Common.pm | Jinzang/swift-path | 55b6c32c10967fb10dd03f1f52f2b1d956f28d98 | [
"CC-BY-3.0"
] | null | null | null | use strict;
use warnings;
package Common;
use IO::Dir;
use IO::File;
require Exporter;
our @ISA = qw(Exporter);
our @EXPORT = qw(read_file read_paragraph update_file visitor
write_file write_paragraph);
#----------------------------------------------------------------------
# Read a file
sub read_file {
my ($filename) = @_;
local $/; # to slurp the file
my $in = IO::File->new($filename, 'r')
or die "Couldn't open $filename: $!\n";
my $text = <$in>;
close($in);
return $text;
}
#----------------------------------------------------------------------
# Read a paragraph
sub read_paragraph {
my ($text, $name) = @_;
my ($para) = $text =~ /<p class="$name">\n*(.*?)\n*<\/p>/sm;
return $para;
}
#----------------------------------------------------------------------
# Update a file
sub update_file {
my ($filename) = @_;
my $saved_filename = "$filename~";
rename($filename, $saved_filename);
return read_file($saved_filename);
}
#----------------------------------------------------------------------
# Return a closure that visits files in a directory and subdirectories
sub visitor {
my ($top_dir, $pattern) = @_;
my @dirlist;
my @filelist;
push(@dirlist, $top_dir);
return sub {
for (;;) {
my $file = shift @filelist;
return $file if defined $file;
my $dir = shift(@dirlist);
return unless defined $dir;
my $dd = IO::Dir->new($dir) or die "Couldn't open $dir: $!\n";
# Find matching files and directories
while (defined (my $file = $dd->read())) {
next if $file eq '.' || $file eq '..';
my $newfile = "$dir/$file";
if (-d $newfile) {
push(@dirlist, $newfile);
} elsif ($file =~ /^$pattern$/) {
push(@filelist, $newfile);
}
}
$dd->close;
}
return;
};
}
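# Example use (illustrative; the pattern is matched with /^$pattern$/):
#   my $next_file = visitor('site', '.*\.html');
#   while (defined(my $file = $next_file->())) {
#       print "$file\n";
#   }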
#----------------------------------------------------------------------
# Write text to a file
sub write_file {
my ($filename, $text) = @_;
my $out = IO::File->new($filename, 'w')
or die "Couldn't open $filename: $!\n";
print $out $text;
close($out);
return;
}
#----------------------------------------------------------------------
# Substitute cleaned up data back into template
sub write_paragraph {
my ($text, $name, $para) = @_;
my $new_text = $text;
$new_text =~ s/<p class="$name">.*?<\/p>/<p class="$name">\n$para\n<\/p>/sm;
return $new_text;
}
1;
| 21.85124 | 80 | 0.434191 |
73e30a51393427de9461852b9d99efe88f7672b5 | 5,883 | pm | Perl | lib/Kubernetes/Object/V1APIResourceList.pm | yue9944882/perl | b6446c40bc7b00a9a5aca8e63532864e922f7f87 | [
"Apache-2.0"
] | null | null | null | lib/Kubernetes/Object/V1APIResourceList.pm | yue9944882/perl | b6446c40bc7b00a9a5aca8e63532864e922f7f87 | [
"Apache-2.0"
] | null | null | null | lib/Kubernetes/Object/V1APIResourceList.pm | yue9944882/perl | b6446c40bc7b00a9a5aca8e63532864e922f7f87 | [
"Apache-2.0"
] | null | null | null | =begin comment
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
OpenAPI spec version: v1.13.5
Generated by: https://openapi-generator.tech
=end comment
=cut
#
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# Do not edit the class manually.
# Ref: https://openapi-generator.tech
#
package Kubernetes::Object::V1APIResourceList;
require 5.6.0;
use strict;
use warnings;
use utf8;
use JSON qw(decode_json);
use Data::Dumper;
use Module::Runtime qw(use_module);
use Log::Any qw($log);
use Date::Parse;
use DateTime;
use Kubernetes::Object::V1APIResource;
use base ("Class::Accessor", "Class::Data::Inheritable");
#
#APIResourceList is a list of APIResource, it is used to expose the name of the resources supported in a specific group and version, and if the resource is namespaced.
#
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). Do not edit the class manually.
# REF: https://openapi-generator.tech
#
=begin comment
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
OpenAPI spec version: v1.13.5
Generated by: https://openapi-generator.tech
=end comment
=cut
#
# NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# Do not edit the class manually.
# Ref: https://openapi-generator.tech
#
__PACKAGE__->mk_classdata('attribute_map' => {});
__PACKAGE__->mk_classdata('openapi_types' => {});
__PACKAGE__->mk_classdata('method_documentation' => {});
__PACKAGE__->mk_classdata('class_documentation' => {});
# new object
sub new {
my ($class, %args) = @_;
my $self = bless {}, $class;
foreach my $attribute (keys %{$class->attribute_map}) {
my $args_key = $class->attribute_map->{$attribute};
$self->$attribute( $args{ $args_key } );
}
return $self;
}
# return perl hash
sub to_hash {
return decode_json(JSON->new->convert_blessed->encode( shift ));
}
# used by JSON for serialization
sub TO_JSON {
my $self = shift;
my $_data = {};
foreach my $_key (keys %{$self->attribute_map}) {
if (defined $self->{$_key}) {
$_data->{$self->attribute_map->{$_key}} = $self->{$_key};
}
}
return $_data;
}
# from Perl hashref
sub from_hash {
my ($self, $hash) = @_;
# loop through attributes and use openapi_types to deserialize the data
while ( my ($_key, $_type) = each %{$self->openapi_types} ) {
my $_json_attribute = $self->attribute_map->{$_key};
if ($_type =~ /^array\[/i) { # array
my $_subclass = substr($_type, 6, -1);
my @_array = ();
foreach my $_element (@{$hash->{$_json_attribute}}) {
push @_array, $self->_deserialize($_subclass, $_element);
}
$self->{$_key} = \@_array;
} elsif (exists $hash->{$_json_attribute}) { #hash(model), primitive, datetime
$self->{$_key} = $self->_deserialize($_type, $hash->{$_json_attribute});
} else {
$log->debugf("Warning: %s (%s) does not exist in input hash\n", $_key, $_json_attribute);
}
}
return $self;
}
# deserialize non-array data
sub _deserialize {
my ($self, $type, $data) = @_;
$log->debugf("deserializing %s with %s",Dumper($data), $type);
if ($type eq 'DateTime') {
return DateTime->from_epoch(epoch => str2time($data));
} elsif ( grep( /^$type$/, ('int', 'double', 'string', 'boolean'))) {
return $data;
} else { # hash(model)
my $_instance = eval "Kubernetes::Object::$type->new()";
return $_instance->from_hash($data);
}
}
__PACKAGE__->class_documentation({description => 'APIResourceList is a list of APIResource, it is used to expose the name of the resources supported in a specific group and version, and if the resource is namespaced.',
class => 'V1APIResourceList',
required => [], # TODO
} );
__PACKAGE__->method_documentation({
'api_version' => {
datatype => 'string',
base_name => 'apiVersion',
description => 'APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources',
format => '',
read_only => '',
},
'group_version' => {
datatype => 'string',
base_name => 'groupVersion',
description => 'groupVersion is the group and version this APIResourceList is for.',
format => '',
read_only => '',
},
'kind' => {
datatype => 'string',
base_name => 'kind',
description => 'Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds',
format => '',
read_only => '',
},
'resources' => {
datatype => 'ARRAY[V1APIResource]',
base_name => 'resources',
description => 'resources contains the name of the resources and if they are namespaced.',
format => '',
read_only => '',
},
});
__PACKAGE__->openapi_types( {
'api_version' => 'string',
'group_version' => 'string',
'kind' => 'string',
'resources' => 'ARRAY[V1APIResource]'
} );
__PACKAGE__->attribute_map( {
'api_version' => 'apiVersion',
'group_version' => 'groupVersion',
'kind' => 'kind',
'resources' => 'resources'
} );
__PACKAGE__->mk_accessors(keys %{__PACKAGE__->attribute_map});
1;
| 30.169231 | 296 | 0.63964 |
ede21b8db279576a77c5a41ba982f1f4bf54380f | 232 | t | Perl | tests/store/objects.t | Gaia-Interactive/gaia_core_php | c6ef27682e4ed96cd8d55ae4649e9ed59d18e02a | [
"BSD-3-Clause"
] | 3 | 2015-02-23T19:57:07.000Z | 2020-07-13T16:02:01.000Z | tests/store/objects.t | Gaia-Interactive/gaia_core_php | c6ef27682e4ed96cd8d55ae4649e9ed59d18e02a | [
"BSD-3-Clause"
] | null | null | null | tests/store/objects.t | Gaia-Interactive/gaia_core_php | c6ef27682e4ed96cd8d55ae4649e9ed59d18e02a | [
"BSD-3-Clause"
] | 4 | 2015-03-03T08:14:12.000Z | 2019-01-09T04:46:43.000Z | #!/usr/bin/env php
<?php
include_once __DIR__ . '/../common.php';
$input = array('a'=>new Gaia\Store\KVP(), 'b'=>new stdclass, 'c'=> new ArrayIterator( array(1,2,3) ) );
include dirname(__FILE__) . DIRECTORY_SEPARATOR . 'base.php';
| 38.666667 | 103 | 0.663793 |
edd19dc3eaca1b5b4f818958bef78dcda33df3f4 | 2,262 | pm | Perl | storage/lenovo/iomega/snmp/mode/components/raid.pm | dalfo77/centreon-plugins | 3cb2011c46a45b5e4a785ca6bab439142f882d45 | [
"Apache-2.0"
] | null | null | null | storage/lenovo/iomega/snmp/mode/components/raid.pm | dalfo77/centreon-plugins | 3cb2011c46a45b5e4a785ca6bab439142f882d45 | [
"Apache-2.0"
] | null | null | null | storage/lenovo/iomega/snmp/mode/components/raid.pm | dalfo77/centreon-plugins | 3cb2011c46a45b5e4a785ca6bab439142f882d45 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2020 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package storage::lenovo::iomega::snmp::mode::components::raid;
use strict;
use warnings;
my $mapping = {
raidStatus => { oid => '.1.3.6.1.4.1.11369.10.4.1' }
};
sub load {
my ($self) = @_;
push @{$self->{request}}, { oid => $mapping->{raidStatus}->{oid} };
}
sub check {
my ($self) = @_;
$self->{output}->output_add(long_msg => 'checking raids');
$self->{components}->{raid} = { name => 'raids', total => 0, skip => 0 };
return if ($self->check_filter(section => 'raid'));
my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{ $mapping->{raidStatus}->{oid} }, instance => '0');
return if (!defined($result->{raidStatus}));
my $instance = 1;
    return if ($self->check_filter(section => 'raid', instance => $instance)); # not inside a loop, so return (not next) when the instance is filtered out
$self->{components}->{raid}->{total}++;
$self->{output}->output_add(
long_msg => sprintf(
"raid '%s' status is '%s' [instance = %s]",
$instance, $result->{raidStatus}, $instance
)
);
my $exit = $self->get_severity(section => 'raid', value => $result->{raidStatus});
if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
$self->{output}->output_add(
severity => $exit,
short_msg => sprintf(
"Raid '%s' status is '%s'", $instance, $result->{raidStatus}
)
);
}
}
1;
| 33.264706 | 148 | 0.596375 |
eddbdfe4697076d564f17923f82feb6f2fbca9e7 | 70 | pl | Perl | src/tutorial/program/nrev.pl | lindseyspratt/wa-prolog | 953d32071dc3086d3ff225685d4a708e7103f3a6 | [
"MIT"
] | 2 | 2021-04-15T18:54:16.000Z | 2021-04-15T21:29:29.000Z | src/tutorial/program/nrev.pl | lindseyspratt/wa-prolog | 953d32071dc3086d3ff225685d4a708e7103f3a6 | [
"MIT"
] | null | null | null | src/tutorial/program/nrev.pl | lindseyspratt/wa-prolog | 953d32071dc3086d3ff225685d4a708e7103f3a6 | [
"MIT"
] | null | null | null | nrev([],[]).
nrev([X|Rest],Ans):-
nrev(Rest,L),
append(L,[X],Ans).
| 11.666667 | 20 | 0.514286 |
ed6e3f707ee140711c437c6b39c99fb78220df4a | 1,146 | pm | Perl | lib/Google/Ads/GoogleAds/V3/Services/GoogleAdsService/MutateGoogleAdsResponse.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
] | null | null | null | lib/Google/Ads/GoogleAds/V3/Services/GoogleAdsService/MutateGoogleAdsResponse.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
] | null | null | null | lib/Google/Ads/GoogleAds/V3/Services/GoogleAdsService/MutateGoogleAdsResponse.pm | PierrickVoulet/google-ads-perl | bc9fa2de22aa3e11b99dc22251d90a1723dd8cc4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Google::Ads::GoogleAds::V3::Services::GoogleAdsService::MutateGoogleAdsResponse;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);
use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;
sub new {
my ($class, $args) = @_;
my $self = {
mutateOperationResponses => $args->{mutateOperationResponses},
partialFailureError => $args->{partialFailureError}};
# Delete the unassigned fields in this object for a more concise JSON payload
remove_unassigned_fields($self, $args);
bless $self, $class;
return $self;
}
1;
| 30.972973 | 88 | 0.739965 |
ed9484c3e68c389511761b0b9b67d8cc2b0b1cc7 | 849 | pl | Perl | project_rloop/tabler.pl | KorfLab/datacore | f6eb04650d8257a8e2eecd44928a60368d374d38 | [
"MIT"
] | null | null | null | project_rloop/tabler.pl | KorfLab/datacore | f6eb04650d8257a8e2eecd44928a60368d374d38 | [
"MIT"
] | null | null | null | project_rloop/tabler.pl | KorfLab/datacore | f6eb04650d8257a8e2eecd44928a60368d374d38 | [
"MIT"
] | null | null | null | use strict;
use warnings FATAL => 'all';
die "usage: $0 <file1> <file2>\n" unless @ARGV == 2;
my $d0 = read_file($ARGV[0]);
my $d1 = read_file($ARGV[1]);
foreach my $k1 (sort keys %$d0) {
foreach my $k2 (sort keys %{$d0->{$k1}}) {
print join("\t",
$k1,
$k2,
$d0->{$k1}{$k2},
$d1->{$k1}{$k2},
abs($d0->{$k1}{$k2} - $d1->{$k1}{$k2})), "\n";
}
}
sub read_file {
my ($file) = @_;
my %d;
open(my $fh, $file) or die;
while (<$fh>) {
my ($f1, $f2, $v) = split;
my $t1 = make_name($f1);
my $t2 = make_name($f2);
$d{$t1}{$t2} = $v;
}
return \%d;
}
sub make_name {
my ($text) = @_;
my $name = "";
if ($text =~ /HeLa/) {$name .= "H"}
else {$name .= "K"}
my ($n) = $text =~ /_rep(\d)_/;
$name .= $n;
if ($text =~ /_neg/) {$name .= "-"}
else {$name .= "+"}
return $name;
} | 17.326531 | 52 | 0.448763 |
edcc6544f6e257f46159d96d9336a14e219c16fe | 4,514 | pm | Perl | msys64/usr/lib/perl5/core_perl/Sub/Util.pm | andresfaagit/sg-desarrollo-ruby | 2c9bc5dad657d9713cc0f7fc4e883a85b83537e5 | [
"Ruby"
] | 9 | 2018-04-19T05:08:30.000Z | 2021-11-23T07:36:58.000Z | msys64/usr/lib/perl5/core_perl/Sub/Util.pm | andresfaagit/sg-desarrollo-ruby | 2c9bc5dad657d9713cc0f7fc4e883a85b83537e5 | [
"Ruby"
] | 98 | 2017-11-02T19:00:44.000Z | 2022-03-22T16:15:39.000Z | msys64/usr/lib/perl5/core_perl/Sub/Util.pm | andresfaagit/sg-desarrollo-ruby | 2c9bc5dad657d9713cc0f7fc4e883a85b83537e5 | [
"Ruby"
] | 9 | 2017-10-24T21:53:36.000Z | 2021-11-23T07:36:59.000Z | # Copyright (c) 2014 Paul Evans <[email protected]>. All rights reserved.
# This program is free software; you can redistribute it and/or
# modify it under the same terms as Perl itself.
package Sub::Util;
use strict;
use warnings;
require Exporter;
our @ISA = qw( Exporter );
our @EXPORT_OK = qw(
prototype set_prototype
subname set_subname
);
our $VERSION = "1.50";
$VERSION = eval $VERSION;
require List::Util; # as it has the XS
List::Util->VERSION( $VERSION ); # Ensure we got the right XS version (RT#100863)
=head1 NAME
Sub::Util - A selection of utility subroutines for subs and CODE references
=head1 SYNOPSIS
use Sub::Util qw( prototype set_prototype subname set_subname );
=head1 DESCRIPTION
C<Sub::Util> contains a selection of utility subroutines that are useful for
operating on subs and CODE references.
The rationale for inclusion in this module is that the function performs some
work for which an XS implementation is essential because it cannot be
implemented in Pure Perl, and which is sufficiently-widely used across CPAN
that its popularity warrants inclusion in a core module, which this is.
=cut
=head1 FUNCTIONS
=cut
=head2 prototype
my $proto = prototype( $code )
I<Since version 1.40.>
Returns the prototype of the given C<$code> reference, if it has one, as a
string. This is the same as the C<CORE::prototype> operator; it is included
here simply for symmetry and completeness with the other functions.
=cut
sub prototype
{
my ( $code ) = @_;
return CORE::prototype( $code );
}
=head2 set_prototype
my $code = set_prototype $prototype, $code;
I<Since version 1.40.>
Sets the prototype of the function given by the C<$code> reference, or deletes
it if C<$prototype> is C<undef>. Returns the C<$code> reference itself.
I<Caution>: This function takes arguments in a different order to the previous
copy of the code from C<Scalar::Util>. This is to match the order of
C<set_subname>, and other potential additions in this file. This order has
been chosen as it allows a neat and simple chaining of other
C<Sub::Util::set_*> functions as might become available, such as:
my $code =
set_subname name_here =>
set_prototype '&@' =>
set_attribute ':lvalue' =>
sub { ...... };
=cut
=head2 subname
my $name = subname( $code )
I<Since version 1.40.>
Returns the name of the given C<$code> reference, if it has one. Normal named
subs will give a fully-qualified name consisting of the package and the
localname separated by C<::>. Anonymous code references will give C<__ANON__>
as the localname. If a name has been set using L</set_subname>, this name will
be returned instead.
This function was inspired by C<sub_fullname> from L<Sub::Identify>. The
remaining functions that C<Sub::Identify> implements can easily be emulated
using regexp operations, such as
sub get_code_info { return (subname $_[0]) =~ m/^(.+)::(.*?)$/ }
sub sub_name { return (get_code_info $_[0])[0] }
sub stash_name { return (get_code_info $_[0])[1] }
I<Users of Sub::Name beware>: This function is B<not> the same as
C<Sub::Name::subname>; it returns the existing name of the sub rather than
changing it. To set or change a name, see instead L</set_subname>.
=cut
=head2 set_subname
my $code = set_subname $name, $code;
I<Since version 1.40.>
Sets the name of the function given by the C<$code> reference. Returns the
C<$code> reference itself. If the C<$name> is unqualified, the package of the
caller is used to qualify it.
This is useful for applying names to anonymous CODE references so that stack
traces and similar situations give a useful name rather than the
default of C<__ANON__>. Note that this name is only used for this situation;
the C<set_subname> will not install it into the symbol table; you will have to
do that yourself if required.
However, since the name is not used by perl except as the return value of
C<caller>, for stack traces or similar, there is no actual requirement that
the name be syntactically valid as a perl function name. This could be used to
attach extra information that could be useful in debugging stack traces.
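For example, a generated callback can be tagged so that stack traces identify
it (the chosen name is illustrative):
    my $cb = set_subname 'My::App::on_request' => sub {
        die "oops";   # the trace shows My::App::on_request, not __ANON__
    };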
This function was copied from C<Sub::Name::subname> and renamed to the naming
convention of this module.
=cut
=head1 AUTHOR
The general structure of this module was written by Paul Evans
<[email protected]>.
The XS implementation of L</set_subname> was copied from L<Sub::Name> by
Matthijs van Duin <[email protected]>
=cut
1;
| 29.697368 | 81 | 0.740806 |
ede82da093495ec191ceea3fa508eeaec2bfcbd7 | 486 | t | Perl | t/default-directory.t | chocolateboy/App-Wax | 8e199c8d776179a2f99608fe4525921d431d7aae | [
"Artistic-2.0"
] | null | null | null | t/default-directory.t | chocolateboy/App-Wax | 8e199c8d776179a2f99608fe4525921d431d7aae | [
"Artistic-2.0"
] | 1 | 2018-09-29T04:48:08.000Z | 2018-09-29T04:48:08.000Z | t/default-directory.t | chocolateboy/App-Wax | 8e199c8d776179a2f99608fe4525921d431d7aae | [
"Artistic-2.0"
] | 1 | 2018-09-29T04:33:43.000Z | 2018-09-29T04:33:43.000Z | use strict;
use warnings;
use FindBin qw($Bin);
use lib "$Bin/lib";
use Test::App::Wax qw(@DEFAULT @URL wax_is);
use Test::More tests => 4;
wax_is(
"wax --cache --default-directory cmd --foo $URL[0]",
"cmd --foo $DEFAULT[0]"
);
wax_is(
"wax --cache -D cmd --foo $URL[0]",
"cmd --foo $DEFAULT[0]"
);
wax_is(
"wax -c --default-directory cmd --foo $URL[0]",
"cmd --foo $DEFAULT[0]"
);
wax_is(
"wax -c -D cmd --foo $URL[0]",
"cmd --foo $DEFAULT[0]"
);
| 16.758621 | 56 | 0.561728 |
ed80180d461896830e136ac3616d5bbac2a43da5 | 366 | pm | Perl | lib/Moose/Exception/AddParameterizableTypeTakesParameterizableType.pm | mjemmeson/Moose | 500a1f3331d4a7187775f4b82cdc4f1af2e232d6 | [
"Artistic-1.0"
] | null | null | null | lib/Moose/Exception/AddParameterizableTypeTakesParameterizableType.pm | mjemmeson/Moose | 500a1f3331d4a7187775f4b82cdc4f1af2e232d6 | [
"Artistic-1.0"
] | null | null | null | lib/Moose/Exception/AddParameterizableTypeTakesParameterizableType.pm | mjemmeson/Moose | 500a1f3331d4a7187775f4b82cdc4f1af2e232d6 | [
"Artistic-1.0"
] | null | null | null | package Moose::Exception::AddParameterizableTypeTakesParameterizableType;
our $VERSION = '2.1803';
use Moose;
extends 'Moose::Exception';
has 'type_name' => (
is => 'ro',
isa => 'Str',
required => 1
);
sub _build_message {
my $self = shift;
"Type must be a Moose::Meta::TypeConstraint::Parameterizable not ".$self->type_name;
}
1;
| 19.263158 | 88 | 0.644809 |
edc6478a3d251e0a58769f91b114dd4aa3576a82 | 652 | t | Perl | t/Catmandu-Fix-split_field.t | gitpan/Catmandu | 014bb49639534756856f8db46a999d879269122f | [
"Artistic-1.0"
] | null | null | null | t/Catmandu-Fix-split_field.t | gitpan/Catmandu | 014bb49639534756856f8db46a999d879269122f | [
"Artistic-1.0"
] | null | null | null | t/Catmandu-Fix-split_field.t | gitpan/Catmandu | 014bb49639534756856f8db46a999d879269122f | [
"Artistic-1.0"
] | null | null | null | #!/usr/bin/env perl
use strict;
use warnings;
use Test::More;
use Test::Exception;
my $pkg;
BEGIN {
$pkg = 'Catmandu::Fix::split_field';
use_ok $pkg;
}
is_deeply
$pkg->new('splitme', ',')->fix({splitme => "a,b,c"}),
{splitme => ["a", "b", "c"]},
"split value";
is_deeply
$pkg->new('many.*.splitme', ',')->fix({many => [{splitme => "a,b,c"}, {splitme => "a,b,c"}]}),
{many => [{splitme => ["a", "b", "c"]}, {splitme => ["a", "b", "c"]}]},
"split wildcard values";
is_deeply
$pkg->new('splitme', ',')->fix({splitme => ["a", "b", "c"]}),
{splitme => ["a", "b", "c"]},
"only split values";
done_testing 4;
| 21.733333 | 98 | 0.503067 |
ede0c82c95301a312b3cae143bf668da30625ba4 | 238 | plx | Perl | .kodi/userdata/addon_data/script.navi-x/My Playlists.plx | C6SUMMER/allinclusive-kodi-pi | 8baf247c79526849c640c6e56ca57a708a65bd11 | [
"Apache-2.0"
] | null | null | null | .kodi/userdata/addon_data/script.navi-x/My Playlists.plx | C6SUMMER/allinclusive-kodi-pi | 8baf247c79526849c640c6e56ca57a708a65bd11 | [
"Apache-2.0"
] | null | null | null | .kodi/userdata/addon_data/script.navi-x/My Playlists.plx | C6SUMMER/allinclusive-kodi-pi | 8baf247c79526849c640c6e56ca57a708a65bd11 | [
"Apache-2.0"
] | 2 | 2018-04-17T17:34:39.000Z | 2020-07-26T03:43:33.000Z | version=5
#
#playlist properties:
background=default
logo=none
title=My Playlists
#
type=text
name=Readme
thumb=default
URL=My Playlists/readme.txt
#
type=playlist
name=Examples
thumb=default
URL=examples/examples.plx
#
| 13.222222 | 28 | 0.747899 |
edddfec7d2bfae249d5a518c33b30b6b878e8b8b | 2,098 | pm | Perl | auto-lib/Paws/ES/VPCDerivedInfo.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/ES/VPCDerivedInfo.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/ES/VPCDerivedInfo.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z | # Generated by default/object.tt
package Paws::ES::VPCDerivedInfo;
use Moose;
has AvailabilityZones => (is => 'ro', isa => 'ArrayRef[Str|Undef]');
has SecurityGroupIds => (is => 'ro', isa => 'ArrayRef[Str|Undef]');
has SubnetIds => (is => 'ro', isa => 'ArrayRef[Str|Undef]');
has VPCId => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::ES::VPCDerivedInfo
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::ES::VPCDerivedInfo object:
$service_obj->Method(Att1 => { AvailabilityZones => $value, ..., VPCId => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be a Paws::ES::VPCDerivedInfo object:
$result = $service_obj->Method(...);
$result->Att1->AvailabilityZones
=head1 DESCRIPTION
Options to specify the subnets and security groups for VPC endpoint.
For more information, see VPC Endpoints for Amazon Elasticsearch
Service Domains
(http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-vpc.html).
=head1 ATTRIBUTES
=head2 AvailabilityZones => ArrayRef[Str|Undef]
The availability zones for the Elasticsearch domain. Exists only if the
domain was created with VPCOptions.
=head2 SecurityGroupIds => ArrayRef[Str|Undef]
Specifies the security groups for VPC endpoint.
=head2 SubnetIds => ArrayRef[Str|Undef]
Specifies the subnets for VPC endpoint.
=head2 VPCId => Str
The VPC Id for the Elasticsearch domain. Exists only if the domain was
created with VPCOptions.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::ES>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 25.585366 | 102 | 0.736892 |
ed9c626a20b7f4dd373fa7d50bc45f68c874dd53 | 252,572 | pm | Perl | bin/lib/Image/ExifTool/Exif.pm | mceachen/exiftool_vendored.rb | bab2705f32f3b8fc47486ec9ceb6f012972419c2 | [
"MIT"
] | 5 | 2017-02-18T11:03:32.000Z | 2019-01-29T16:04:41.000Z | bin/lib/Image/ExifTool/Exif.pm | mceachen/exiftool_vendored.rb | bab2705f32f3b8fc47486ec9ceb6f012972419c2 | [
"MIT"
] | null | null | null | bin/lib/Image/ExifTool/Exif.pm | mceachen/exiftool_vendored.rb | bab2705f32f3b8fc47486ec9ceb6f012972419c2 | [
"MIT"
] | null | null | null | #------------------------------------------------------------------------------
# File: Exif.pm
#
# Description: Read EXIF/TIFF meta information
#
# Revisions: 11/25/2003 - P. Harvey Created
# 02/06/2004 - P. Harvey Moved processing functions from ExifTool
# 03/19/2004 - P. Harvey Check PreviewImage for validity
# 11/11/2004 - P. Harvey Split off maker notes into MakerNotes.pm
# 12/13/2004 - P. Harvey Added AUTOLOAD to load write routines
#
# References: 0) http://www.exif.org/Exif2-2.PDF
# 1) http://partners.adobe.com/asn/developer/pdfs/tn/TIFF6.pdf
# 2) http://www.adobe.com/products/dng/pdfs/dng_spec_1_3_0_0.pdf
# 3) http://www.awaresystems.be/imaging/tiff/tifftags.html
# 4) http://www.remotesensing.org/libtiff/TIFFTechNote2.html
# 5) http://www.exif.org/dcf.PDF
# 6) http://park2.wakwak.com/~tsuruzoh/Computer/Digicams/exif-e.html
# 7) http://www.fine-view.com/jp/lab/doc/ps6ffspecsv2.pdf
# 8) http://www.ozhiker.com/electronics/pjmt/jpeg_info/meta.html
# 9) http://hul.harvard.edu/jhove/tiff-tags.html
# 10) http://partners.adobe.com/public/developer/en/tiff/TIFFPM6.pdf
# 11) Robert Mucke private communication
# 12) http://www.broomscloset.com/closet/photo/exif/TAG2000-22_DIS12234-2.PDF
# 13) http://www.microsoft.com/whdc/xps/wmphoto.mspx
# 14) http://www.asmail.be/msg0054681802.html
# 15) http://crousseau.free.fr/imgfmt_raw.htm
# 16) http://www.cybercom.net/~dcoffin/dcraw/
# 17) http://www.digitalpreservation.gov/formats/content/tiff_tags.shtml
# 18) http://www.asmail.be/msg0055568584.html
# 19) http://libpsd.graphest.com/files/Photoshop%20File%20Formats.pdf
# 20) http://tiki-lounge.com/~raf/tiff/fields.html
# 21) http://community.roxen.com/developers/idocs/rfc/rfc3949.html
# 22) http://tools.ietf.org/html/draft-ietf-fax-tiff-fx-extension1-01
# 23) MetaMorph Stack (STK) Image File Format:
# --> ftp://ftp.meta.moleculardevices.com/support/stack/STK.doc
# 24) http://www.cipa.jp/std/documents/e/DC-008-2012_E.pdf (Exif 2.3)
# 25) Vesa Kivisto private communication (7D)
# 26) Jeremy Brown private communication
# 27) Gregg Lee private communication
# 28) http://wwwimages.adobe.com/www.adobe.com/content/dam/Adobe/en/devnet/cinemadng/pdfs/CinemaDNG_Format_Specification_v1_1.pdf
# 29) http://www.libtiff.org
# 30) http://geotiff.maptools.org/spec/geotiffhome.html
# 31) https://android.googlesource.com/platform/external/dng_sdk/+/refs/heads/master/source/dng_tag_codes.h
# 32) Jeffry Friedl private communication
# IB) Iliah Borg private communication (LibRaw)
# JD) Jens Duttke private communication
#------------------------------------------------------------------------------
package Image::ExifTool::Exif;
use strict;
use vars qw($VERSION $AUTOLOAD @formatSize @formatName %formatNumber %intFormat
%lightSource %flash %compression %photometricInterpretation %orientation
%subfileType %saveForValidate);
use Image::ExifTool qw(:DataAccess :Utils);
use Image::ExifTool::MakerNotes;
$VERSION = '4.40';
sub ProcessExif($$$);
sub WriteExif($$$);
sub CheckExif($$$);
sub RebuildMakerNotes($$$);
sub EncodeExifText($$);
sub ValidateIFD($;$);
sub ValidateImageData($$$;$);
sub ProcessTiffIFD($$$);
sub PrintParameter($$$);
sub GetOffList($$$$$);
sub PrintOpcode($$$);
sub PrintLensInfo($);
sub ConvertLensInfo($);
# size limit for loading binary data block into memory
sub BINARY_DATA_LIMIT { return 10 * 1024 * 1024; }
# byte sizes for the various EXIF format types below
@formatSize = (undef,1,1,2,4,8,1,1,2,4,8,4,8,4,2,8,8,8,8);
@formatName = (
undef, 'int8u', 'string', 'int16u',
'int32u', 'rational64u','int8s', 'undef',
'int16s', 'int32s', 'rational64s','float',
'double', 'ifd', 'unicode', 'complex',
'int64u', 'int64s', 'ifd64', # (new BigTIFF formats)
);
# hash to look up EXIF format numbers by name
# (format types are all lower case)
%formatNumber = (
'int8u' => 1, # BYTE
'string' => 2, # ASCII
'int16u' => 3, # SHORT
'int32u' => 4, # LONG
'rational64u' => 5, # RATIONAL
'int8s' => 6, # SBYTE
'undef' => 7, # UNDEFINED
'binary' => 7, # (same as undef)
'int16s' => 8, # SSHORT
'int32s' => 9, # SLONG
'rational64s' => 10, # SRATIONAL
'float' => 11, # FLOAT
'double' => 12, # DOUBLE
'ifd' => 13, # IFD (with int32u format)
'unicode' => 14, # UNICODE [see Note below]
'complex' => 15, # COMPLEX [see Note below]
'int64u' => 16, # LONG8 [BigTIFF]
'int64s' => 17, # SLONG8 [BigTIFF]
'ifd64' => 18, # IFD8 (with int64u format) [BigTIFF]
# Note: unicode and complex types are not yet properly supported by ExifTool.
# These are types which have been observed in the Adobe DNG SDK code, but
# aren't fully supported there either. We know the sizes, but that's about it.
# We don't know if the unicode is null terminated, or the format for complex
# (although I suspect it would be two 4-byte floats, real and imaginary).
);
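# Worked example (illustrative only): the tables above combine to give the
# byte size of an IFD value -- the per-component size from @formatSize times
# the component count stored in the IFD entry, e.g.
#   $formatNumber{'rational64u'} == 5 and $formatSize[5] == 8,
#   so a rational64u tag with a count of 3 occupies 8 * 3 = 24 bytes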
# lookup for integer format strings
%intFormat = (
'int8u' => 1,
'int16u' => 3,
'int32u' => 4,
'int8s' => 6,
'int16s' => 8,
'int32s' => 9,
'ifd' => 13,
'int64u' => 16,
'int64s' => 17,
'ifd64' => 18,
);
# EXIF LightSource PrintConv values
%lightSource = (
0 => 'Unknown',
1 => 'Daylight',
2 => 'Fluorescent',
3 => 'Tungsten (Incandescent)',
4 => 'Flash',
9 => 'Fine Weather',
10 => 'Cloudy',
11 => 'Shade',
12 => 'Daylight Fluorescent', # (D 5700 - 7100K)
13 => 'Day White Fluorescent', # (N 4600 - 5500K)
14 => 'Cool White Fluorescent', # (W 3800 - 4500K)
15 => 'White Fluorescent', # (WW 3250 - 3800K)
16 => 'Warm White Fluorescent', # (L 2600 - 3250K)
17 => 'Standard Light A',
18 => 'Standard Light B',
19 => 'Standard Light C',
20 => 'D55',
21 => 'D65',
22 => 'D75',
23 => 'D50',
24 => 'ISO Studio Tungsten',
255 => 'Other',
);
# EXIF Flash values
%flash = (
OTHER => sub {
# translate "Off" and "On" when writing
my ($val, $inv) = @_;
return undef unless $inv and $val =~ /^(off|on)$/i;
return lc $val eq 'off' ? 0x00 : 0x01;
},
0x00 => 'No Flash',
0x01 => 'Fired',
0x05 => 'Fired, Return not detected',
0x07 => 'Fired, Return detected',
0x08 => 'On, Did not fire', # not charged up?
0x09 => 'On, Fired',
0x0d => 'On, Return not detected',
0x0f => 'On, Return detected',
0x10 => 'Off, Did not fire',
0x14 => 'Off, Did not fire, Return not detected',
0x18 => 'Auto, Did not fire',
0x19 => 'Auto, Fired',
0x1d => 'Auto, Fired, Return not detected',
0x1f => 'Auto, Fired, Return detected',
0x20 => 'No flash function',
0x30 => 'Off, No flash function',
0x41 => 'Fired, Red-eye reduction',
0x45 => 'Fired, Red-eye reduction, Return not detected',
0x47 => 'Fired, Red-eye reduction, Return detected',
0x49 => 'On, Red-eye reduction',
0x4d => 'On, Red-eye reduction, Return not detected',
0x4f => 'On, Red-eye reduction, Return detected',
0x50 => 'Off, Red-eye reduction',
0x58 => 'Auto, Did not fire, Red-eye reduction',
0x59 => 'Auto, Fired, Red-eye reduction',
0x5d => 'Auto, Fired, Red-eye reduction, Return not detected',
0x5f => 'Auto, Fired, Red-eye reduction, Return detected',
);
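# (the EXIF Flash value is a bit field: bit 0 = flash fired, bits 1-2 = return
# light detection, bits 3-4 = flash mode, bit 5 = no flash function and
# bit 6 = red-eye reduction; the values above are the defined combinations)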
# TIFF Compression values
# (values with format "Xxxxx XXX Compressed" are used to identify RAW file types)
%compression = (
1 => 'Uncompressed',
2 => 'CCITT 1D',
3 => 'T4/Group 3 Fax',
4 => 'T6/Group 4 Fax',
5 => 'LZW',
6 => 'JPEG (old-style)', #3
7 => 'JPEG', #4
8 => 'Adobe Deflate', #3
9 => 'JBIG B&W', #3
10 => 'JBIG Color', #3
99 => 'JPEG', #16
262 => 'Kodak 262', #16
32766 => 'Next', #3
32767 => 'Sony ARW Compressed', #16
32769 => 'Packed RAW', #PH (used by Epson, Nikon, Samsung)
32770 => 'Samsung SRW Compressed', #PH
32771 => 'CCIRLEW', #3
32772 => 'Samsung SRW Compressed 2', #PH (NX3000,NXmini)
32773 => 'PackBits',
32809 => 'Thunderscan', #3
32867 => 'Kodak KDC Compressed', #PH
32895 => 'IT8CTPAD', #3
32896 => 'IT8LW', #3
32897 => 'IT8MP', #3
32898 => 'IT8BL', #3
32908 => 'PixarFilm', #3
32909 => 'PixarLog', #3
# 32910,32911 - Pixar reserved
32946 => 'Deflate', #3
32947 => 'DCS', #3
33003 => 'Aperio JPEG 2000 YCbCr', #https://openslide.org/formats/aperio/
33005 => 'Aperio JPEG 2000 RGB', #https://openslide.org/formats/aperio/
34661 => 'JBIG', #3
34676 => 'SGILog', #3
34677 => 'SGILog24', #3
34712 => 'JPEG 2000', #3
34713 => 'Nikon NEF Compressed', #PH
34715 => 'JBIG2 TIFF FX', #20
34718 => 'Microsoft Document Imaging (MDI) Binary Level Codec', #18
34719 => 'Microsoft Document Imaging (MDI) Progressive Transform Codec', #18
34720 => 'Microsoft Document Imaging (MDI) Vector', #18
34887 => 'ESRI Lerc', #LibTiff
# 34888,34889 - ESRI reserved
34892 => 'Lossy JPEG', # (DNG 1.4)
34925 => 'LZMA2', #LibTiff
34926 => 'Zstd', #LibTiff
34927 => 'WebP', #LibTiff
34933 => 'PNG', # (TIFF mail list)
34934 => 'JPEG XR', # (TIFF mail list)
65000 => 'Kodak DCR Compressed', #PH
65535 => 'Pentax PEF Compressed', #Jens
);
%photometricInterpretation = (
0 => 'WhiteIsZero',
1 => 'BlackIsZero',
2 => 'RGB',
3 => 'RGB Palette',
4 => 'Transparency Mask',
5 => 'CMYK',
6 => 'YCbCr',
8 => 'CIELab',
9 => 'ICCLab', #3
10 => 'ITULab', #3
32803 => 'Color Filter Array', #2
32844 => 'Pixar LogL', #3
32845 => 'Pixar LogLuv', #3
32892 => 'Sequential Color Filter', #JR (Sony ARQ)
34892 => 'Linear Raw', #2
51177 => 'Depth Map', # (DNG 1.5)
52527 => 'Semantic Mask', # (DNG 1.6)
);
%orientation = (
1 => 'Horizontal (normal)',
2 => 'Mirror horizontal',
3 => 'Rotate 180',
4 => 'Mirror vertical',
5 => 'Mirror horizontal and rotate 270 CW',
6 => 'Rotate 90 CW',
7 => 'Mirror horizontal and rotate 90 CW',
8 => 'Rotate 270 CW',
);
%subfileType = (
0 => 'Full-resolution image',
1 => 'Reduced-resolution image',
2 => 'Single page of multi-page image',
3 => 'Single page of multi-page reduced-resolution image',
4 => 'Transparency mask',
5 => 'Transparency mask of reduced-resolution image',
6 => 'Transparency mask of multi-page image',
7 => 'Transparency mask of reduced-resolution multi-page image',
8 => 'Depth map', # (DNG 1.5)
9 => 'Depth map of reduced-resolution image', # (DNG 1.5)
16 => 'Enhanced image data', # (DNG 1.5)
0x10001 => 'Alternate reduced-resolution image', # (DNG 1.2)
0x10004 => 'Semantic Mask', # (DNG 1.6)
0xffffffff => 'invalid', #(found in E5700 NEF's)
BITMASK => {
0 => 'Reduced resolution',
1 => 'Single page',
2 => 'Transparency mask',
        3 => 'TIFF/IT final page', #20 (repurposed as DepthMap by DNG 1.5)
4 => 'TIFF-FX mixed raster content', #20 (repurposed as EnhancedImageData by DNG 1.5)
},
);
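# (the named SubfileType values above are sums of the BITMASK bits, e.g.
# 3 = bit 0 + bit 1 = a single page of a multi-page reduced-resolution image)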
# PrintConv for parameter tags
%Image::ExifTool::Exif::printParameter = (
PrintConv => {
0 => 'Normal',
OTHER => \&Image::ExifTool::Exif::PrintParameter,
},
);
# convert DNG UTF-8 string values (may be string or int8u format)
my %utf8StringConv = (
Writable => 'string',
Format => 'string',
ValueConv => '$self->Decode($val, "UTF8")',
ValueConvInv => '$self->Encode($val,"UTF8")',
);
# ValueConv that makes long values binary type
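# (returning a SCALAR reference from ValueConv causes the value to be treated
# as a block of binary data rather than being converted to a string)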
my %longBin = (
ValueConv => 'length($val) > 64 ? \$val : $val',
ValueConvInv => '$val',
LongBinary => 1, # flag to avoid decoding values of a large array
);
# PrintConv for SampleFormat (0x153)
my %sampleFormat = (
1 => 'Unsigned', # unsigned integer
2 => 'Signed', # two's complement signed integer
3 => 'Float', # IEEE floating point
4 => 'Undefined',
5 => 'Complex int', # complex integer (ref 3)
6 => 'Complex float', # complex IEEE floating point (ref 3)
);
# save the values of these tags for additional validation checks
%saveForValidate = (
0x100 => 1, # ImageWidth
0x101 => 1, # ImageHeight
0x102 => 1, # BitsPerSample
0x103 => 1, # Compression
0x115 => 1, # SamplesPerPixel
);
# conversions for DNG OpcodeList tags
my %opcodeInfo = (
Writable => 'undef',
WriteGroup => 'SubIFD',
Protected => 1,
Binary => 1,
ConvertBinary => 1, # needed because the ValueConv value is binary
PrintConvColumns => 2,
PrintConv => {
OTHER => \&PrintOpcode,
1 => 'WarpRectilinear',
2 => 'WarpFisheye',
3 => 'FixVignetteRadial',
4 => 'FixBadPixelsConstant',
5 => 'FixBadPixelsList',
6 => 'TrimBounds',
7 => 'MapTable',
8 => 'MapPolynomial',
9 => 'GainMap',
10 => 'DeltaPerRow',
11 => 'DeltaPerColumn',
12 => 'ScalePerRow',
13 => 'ScalePerColumn',
14 => 'WarpRectilinear2', # (DNG 1.6)
},
PrintConvInv => undef, # (so the inverse conversion is not performed)
);
# main EXIF tag table
%Image::ExifTool::Exif::Main = (
GROUPS => { 0 => 'EXIF', 1 => 'IFD0', 2 => 'Image'},
WRITE_PROC => \&WriteExif,
CHECK_PROC => \&CheckExif,
WRITE_GROUP => 'ExifIFD', # default write group
SET_GROUP1 => 1, # set group1 name to directory name for all tags in table
0x1 => {
Name => 'InteropIndex',
Description => 'Interoperability Index',
Protected => 1,
Writable => 'string',
WriteGroup => 'InteropIFD',
PrintConv => {
R98 => 'R98 - DCF basic file (sRGB)',
R03 => 'R03 - DCF option file (Adobe RGB)',
THM => 'THM - DCF thumbnail file',
},
},
0x2 => { #5
Name => 'InteropVersion',
Description => 'Interoperability Version',
Protected => 1,
Writable => 'undef',
Mandatory => 1,
WriteGroup => 'InteropIFD',
RawConv => '$val=~s/\0+$//; $val', # (some idiots add null terminators)
},
0x0b => { #PH
Name => 'ProcessingSoftware',
Writable => 'string',
WriteGroup => 'IFD0',
Notes => 'used by ACD Systems Digital Imaging',
},
0xfe => {
Name => 'SubfileType',
Notes => 'called NewSubfileType by the TIFF specification',
Protected => 1,
Writable => 'int32u',
WriteGroup => 'IFD0',
# set priority directory if this is the full resolution image
DataMember => 'SubfileType',
RawConv => q{
if ($val == ($val & 0x02)) {
$self->SetPriorityDir() if $val == 0;
$$self{PageCount} = ($$self{PageCount} || 0) + 1;
$$self{MultiPage} = 1 if $val == 2 or $$self{PageCount} > 1;
}
$$self{SubfileType} = $val;
},
PrintConv => \%subfileType,
},
0xff => {
Name => 'OldSubfileType',
Notes => 'called SubfileType by the TIFF specification',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
# set priority directory if this is the full resolution image
RawConv => q{
if ($val == 1 or $val == 3) {
$self->SetPriorityDir() if $val == 1;
$$self{PageCount} = ($$self{PageCount} || 0) + 1;
$$self{MultiPage} = 1 if $val == 3 or $$self{PageCount} > 1;
}
$val;
},
PrintConv => {
1 => 'Full-resolution image',
2 => 'Reduced-resolution image',
3 => 'Single page of multi-page image',
},
},
0x100 => {
Name => 'ImageWidth',
# even though Group 1 is set dynamically we need to register IFD1 once
# so it will show up in the group lists
Groups => { 1 => 'IFD1' },
Protected => 1,
Writable => 'int32u',
WriteGroup => 'IFD0',
# Note: priority 0 tags automatically have their priority increased for the
# priority directory (the directory with a SubfileType of "Full-resolution image")
Priority => 0,
},
0x101 => {
Name => 'ImageHeight',
Notes => 'called ImageLength by the EXIF spec.',
Protected => 1,
Writable => 'int32u',
WriteGroup => 'IFD0',
Priority => 0,
},
0x102 => {
Name => 'BitsPerSample',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
Count => -1, # can be 1 or 3: -1 means 'variable'
Priority => 0,
},
0x103 => {
Name => 'Compression',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
Mandatory => 1,
DataMember => 'Compression',
SeparateTable => 'Compression',
RawConv => q{
Image::ExifTool::Exif::IdentifyRawFile($self, $val);
return $$self{Compression} = $val;
},
PrintConv => \%compression,
Priority => 0,
},
0x106 => {
Name => 'PhotometricInterpretation',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
PrintConv => \%photometricInterpretation,
Priority => 0,
},
0x107 => {
Name => 'Thresholding',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
PrintConv => {
1 => 'No dithering or halftoning',
2 => 'Ordered dither or halftone',
3 => 'Randomized dither',
},
},
0x108 => {
Name => 'CellWidth',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
},
0x109 => {
Name => 'CellLength',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
},
0x10a => {
Name => 'FillOrder',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
PrintConv => {
1 => 'Normal',
2 => 'Reversed',
},
},
0x10d => {
Name => 'DocumentName',
Writable => 'string',
WriteGroup => 'IFD0',
},
0x10e => {
Name => 'ImageDescription',
Writable => 'string',
WriteGroup => 'IFD0',
Priority => 0,
},
0x10f => {
Name => 'Make',
Groups => { 2 => 'Camera' },
Writable => 'string',
WriteGroup => 'IFD0',
DataMember => 'Make',
# remove trailing blanks and save as an ExifTool member variable
RawConv => '$val =~ s/\s+$//; $$self{Make} = $val',
# NOTE: trailing "blanks" (spaces) are removed from all EXIF tags which
# may be "unknown" (filled with spaces) according to the EXIF spec.
# This allows conditional replacement with "exiftool -TAG-= -TAG=VALUE".
# - also removed are any other trailing whitespace characters
},
0x110 => {
Name => 'Model',
Description => 'Camera Model Name',
Groups => { 2 => 'Camera' },
Writable => 'string',
WriteGroup => 'IFD0',
DataMember => 'Model',
# remove trailing blanks and save as an ExifTool member variable
RawConv => '$val =~ s/\s+$//; $$self{Model} = $val',
},
0x111 => [
{
Condition => q[
$$self{TIFF_TYPE} eq 'MRW' and $$self{DIR_NAME} eq 'IFD0' and
$$self{Model} =~ /^DiMAGE A200/
],
Name => 'StripOffsets',
IsOffset => 1,
OffsetPair => 0x117, # point to associated byte counts
# A200 stores this information in the wrong byte order!!
ValueConv => '$val=join(" ",unpack("N*",pack("V*",split(" ",$val))));\$val',
ByteOrder => 'LittleEndian',
},
{
# (APP1 IFD2 is for Leica JPEG preview)
Condition => q[
not ($$self{TIFF_TYPE} eq 'CR2' and $$self{DIR_NAME} eq 'IFD0') and
not ($$self{TIFF_TYPE} =~ /^(DNG|TIFF)$/ and $$self{Compression} eq '7' and $$self{SubfileType} ne '0') and
not ($$self{TIFF_TYPE} eq 'APP1' and $$self{DIR_NAME} eq 'IFD2')
],
Name => 'StripOffsets',
IsOffset => 1,
OffsetPair => 0x117, # point to associated byte counts
ValueConv => 'length($val) > 32 ? \$val : $val',
},
{
# PreviewImageStart in IFD0 of CR2 images
Condition => '$$self{TIFF_TYPE} eq "CR2"',
Name => 'PreviewImageStart',
IsOffset => 1,
OffsetPair => 0x117,
Notes => q{
called StripOffsets in most locations, but it is PreviewImageStart in IFD0
of CR2 images and various IFD's of DNG images except for SubIFD2 where it is
JpgFromRawStart
},
DataTag => 'PreviewImage',
Writable => 'int32u',
WriteGroup => 'IFD0',
Protected => 2,
Permanent => 1,
},
{
# PreviewImageStart in various IFD's of DNG images except SubIFD2
Condition => '$$self{DIR_NAME} ne "SubIFD2"',
Name => 'PreviewImageStart',
IsOffset => 1,
OffsetPair => 0x117,
DataTag => 'PreviewImage',
Writable => 'int32u',
WriteGroup => 'All', # (writes to specific group of associated Composite tag)
Protected => 2,
Permanent => 1,
},
{
# JpgFromRawStart in various IFD's of DNG images except SubIFD2
Name => 'JpgFromRawStart',
IsOffset => 1,
OffsetPair => 0x117,
DataTag => 'JpgFromRaw',
Writable => 'int32u',
WriteGroup => 'SubIFD2',
Protected => 2,
Permanent => 1,
},
],
0x112 => {
Name => 'Orientation',
Writable => 'int16u',
WriteGroup => 'IFD0',
PrintConv => \%orientation,
Priority => 0, # so PRIORITY_DIR takes precedence
},
0x115 => {
Name => 'SamplesPerPixel',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
Priority => 0,
},
0x116 => {
Name => 'RowsPerStrip',
Protected => 1,
Writable => 'int32u',
WriteGroup => 'IFD0',
Priority => 0,
},
0x117 => [
{
Condition => q[
$$self{TIFF_TYPE} eq 'MRW' and $$self{DIR_NAME} eq 'IFD0' and
$$self{Model} =~ /^DiMAGE A200/
],
Name => 'StripByteCounts',
OffsetPair => 0x111, # point to associated offset
# A200 stores this information in the wrong byte order!!
ValueConv => '$val=join(" ",unpack("N*",pack("V*",split(" ",$val))));\$val',
ByteOrder => 'LittleEndian',
},
{
# (APP1 IFD2 is for Leica JPEG preview)
Condition => q[
not ($$self{TIFF_TYPE} eq 'CR2' and $$self{DIR_NAME} eq 'IFD0') and
not ($$self{TIFF_TYPE} =~ /^(DNG|TIFF)$/ and $$self{Compression} eq '7' and $$self{SubfileType} ne '0') and
not ($$self{TIFF_TYPE} eq 'APP1' and $$self{DIR_NAME} eq 'IFD2')
],
Name => 'StripByteCounts',
OffsetPair => 0x111, # point to associated offset
ValueConv => 'length($val) > 32 ? \$val : $val',
},
{
# PreviewImageLength in IFD0 of CR2 images
Condition => '$$self{TIFF_TYPE} eq "CR2"',
Name => 'PreviewImageLength',
OffsetPair => 0x111,
Notes => q{
called StripByteCounts in most locations, but it is PreviewImageLength in
IFD0 of CR2 images and various IFD's of DNG images except for SubIFD2 where
it is JpgFromRawLength
},
DataTag => 'PreviewImage',
Writable => 'int32u',
WriteGroup => 'IFD0',
Protected => 2,
Permanent => 1,
},
{
# PreviewImageLength in various IFD's of DNG images except SubIFD2
Condition => '$$self{DIR_NAME} ne "SubIFD2"',
Name => 'PreviewImageLength',
OffsetPair => 0x111,
DataTag => 'PreviewImage',
Writable => 'int32u',
WriteGroup => 'All', # (writes to specific group of associated Composite tag)
Protected => 2,
Permanent => 1,
},
{
# JpgFromRawLength in SubIFD2 of DNG images
Name => 'JpgFromRawLength',
OffsetPair => 0x111,
DataTag => 'JpgFromRaw',
Writable => 'int32u',
WriteGroup => 'SubIFD2',
Protected => 2,
Permanent => 1,
},
],
0x118 => {
Name => 'MinSampleValue',
Writable => 'int16u',
WriteGroup => 'IFD0',
},
0x119 => {
Name => 'MaxSampleValue',
Writable => 'int16u',
WriteGroup => 'IFD0',
},
0x11a => {
Name => 'XResolution',
Writable => 'rational64u',
WriteGroup => 'IFD0',
Mandatory => 1,
Priority => 0, # so PRIORITY_DIR takes precedence
},
0x11b => {
Name => 'YResolution',
Writable => 'rational64u',
WriteGroup => 'IFD0',
Mandatory => 1,
Priority => 0,
},
0x11c => {
Name => 'PlanarConfiguration',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
PrintConv => {
1 => 'Chunky',
2 => 'Planar',
},
Priority => 0,
},
0x11d => {
Name => 'PageName',
Writable => 'string',
WriteGroup => 'IFD0',
},
0x11e => {
Name => 'XPosition',
Writable => 'rational64u',
WriteGroup => 'IFD0',
},
0x11f => {
Name => 'YPosition',
Writable => 'rational64u',
WriteGroup => 'IFD0',
},
# FreeOffsets/FreeByteCounts are used by Ricoh for RMETA information
# in TIFF images (not yet supported)
0x120 => {
Name => 'FreeOffsets',
IsOffset => 1,
OffsetPair => 0x121,
ValueConv => 'length($val) > 32 ? \$val : $val',
},
0x121 => {
Name => 'FreeByteCounts',
OffsetPair => 0x120,
ValueConv => 'length($val) > 32 ? \$val : $val',
},
0x122 => {
Name => 'GrayResponseUnit',
Writable => 'int16u',
WriteGroup => 'IFD0',
PrintConv => { #3
1 => 0.1,
2 => 0.001,
3 => 0.0001,
4 => 0.00001,
5 => 0.000001,
},
},
0x123 => {
Name => 'GrayResponseCurve',
Binary => 1,
},
0x124 => {
Name => 'T4Options',
PrintConv => { BITMASK => {
0 => '2-Dimensional encoding',
1 => 'Uncompressed',
2 => 'Fill bits added',
} }, #3
},
0x125 => {
Name => 'T6Options',
PrintConv => { BITMASK => {
1 => 'Uncompressed',
} }, #3
},
0x128 => {
Name => 'ResolutionUnit',
Notes => 'the value 1 is not standard EXIF',
Writable => 'int16u',
WriteGroup => 'IFD0',
Mandatory => 1,
PrintConv => {
1 => 'None',
2 => 'inches',
3 => 'cm',
},
Priority => 0,
},
0x129 => {
Name => 'PageNumber',
Writable => 'int16u',
WriteGroup => 'IFD0',
Count => 2,
},
0x12c => 'ColorResponseUnit', #9
0x12d => {
Name => 'TransferFunction',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
Count => 768,
Binary => 1,
},
0x131 => {
Name => 'Software',
Writable => 'string',
WriteGroup => 'IFD0',
DataMember => 'Software',
RawConv => '$val =~ s/\s+$//; $$self{Software} = $val', # trim trailing blanks
},
0x132 => {
Name => 'ModifyDate',
Groups => { 2 => 'Time' },
Notes => 'called DateTime by the EXIF spec.',
Writable => 'string',
Shift => 'Time',
WriteGroup => 'IFD0',
Validate => 'ValidateExifDate($val)',
PrintConv => '$self->ConvertDateTime($val)',
PrintConvInv => '$self->InverseDateTime($val,0)',
},
0x13b => {
Name => 'Artist',
Groups => { 2 => 'Author' },
Notes => 'becomes a list-type tag when the MWG module is loaded',
Writable => 'string',
WriteGroup => 'IFD0',
RawConv => '$val =~ s/\s+$//; $val', # trim trailing blanks
},
0x13c => {
Name => 'HostComputer',
Writable => 'string',
WriteGroup => 'IFD0',
},
0x13d => {
Name => 'Predictor',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
PrintConv => {
1 => 'None',
2 => 'Horizontal differencing',
3 => 'Floating point', # (DNG 1.5)
34892 => 'Horizontal difference X2', # (DNG 1.5)
34893 => 'Horizontal difference X4', # (DNG 1.5)
34894 => 'Floating point X2', # (DNG 1.5)
34895 => 'Floating point X4', # (DNG 1.5)
},
},
0x13e => {
Name => 'WhitePoint',
Groups => { 2 => 'Camera' },
Writable => 'rational64u',
WriteGroup => 'IFD0',
Count => 2,
},
0x13f => {
Name => 'PrimaryChromaticities',
Writable => 'rational64u',
WriteGroup => 'IFD0',
Count => 6,
Priority => 0,
},
0x140 => {
Name => 'ColorMap',
Format => 'binary',
Binary => 1,
},
0x141 => {
Name => 'HalftoneHints',
Writable => 'int16u',
WriteGroup => 'IFD0',
Count => 2,
},
0x142 => {
Name => 'TileWidth',
Protected => 1,
Writable => 'int32u',
WriteGroup => 'IFD0',
},
0x143 => {
Name => 'TileLength',
Protected => 1,
Writable => 'int32u',
WriteGroup => 'IFD0',
},
0x144 => {
Name => 'TileOffsets',
IsOffset => 1,
OffsetPair => 0x145,
ValueConv => 'length($val) > 32 ? \$val : $val',
},
0x145 => {
Name => 'TileByteCounts',
OffsetPair => 0x144,
ValueConv => 'length($val) > 32 ? \$val : $val',
},
0x146 => 'BadFaxLines', #3
0x147 => { #3
Name => 'CleanFaxData',
PrintConv => {
0 => 'Clean',
1 => 'Regenerated',
2 => 'Unclean',
},
},
0x148 => 'ConsecutiveBadFaxLines', #3
0x14a => [
{
Name => 'SubIFD',
# use this opportunity to identify an ARW image, and if so we
# must decide if this is a SubIFD or the A100 raw data
# (use SubfileType, Compression and FILE_TYPE to identify ARW/SR2,
# then call SetARW to finish the job)
Condition => q{
$$self{DIR_NAME} ne 'IFD0' or $$self{FILE_TYPE} ne 'TIFF' or
$$self{Make} !~ /^SONY/ or
not $$self{SubfileType} or $$self{SubfileType} != 1 or
not $$self{Compression} or $$self{Compression} != 6 or
not require Image::ExifTool::Sony or
Image::ExifTool::Sony::SetARW($self, $valPt)
},
Groups => { 1 => 'SubIFD' },
Flags => 'SubIFD',
SubDirectory => {
Start => '$val',
MaxSubdirs => 10, # (have seen 5 in a DNG 1.4 image)
},
},
{ #16
Name => 'A100DataOffset',
Notes => 'the data offset in original Sony DSLR-A100 ARW images',
DataMember => 'A100DataOffset',
RawConv => '$$self{A100DataOffset} = $val',
WriteGroup => 'IFD0', # (only for Validate)
IsOffset => 1,
Protected => 2,
},
],
0x14c => {
Name => 'InkSet',
Writable => 'int16u',
WriteGroup => 'IFD0',
PrintConv => { #3
1 => 'CMYK',
2 => 'Not CMYK',
},
},
0x14d => 'InkNames', #3
0x14e => 'NumberofInks', #3
0x150 => 'DotRange',
0x151 => {
Name => 'TargetPrinter',
Writable => 'string',
WriteGroup => 'IFD0',
},
0x152 => {
Name => 'ExtraSamples',
PrintConv => { #20
0 => 'Unspecified',
1 => 'Associated Alpha',
2 => 'Unassociated Alpha',
},
},
0x153 => {
Name => 'SampleFormat',
Notes => 'SamplesPerPixel values',
WriteGroup => 'SubIFD', # (only for Validate)
PrintConvColumns => 2,
PrintConv => [ \%sampleFormat, \%sampleFormat, \%sampleFormat, \%sampleFormat ],
},
0x154 => 'SMinSampleValue',
0x155 => 'SMaxSampleValue',
0x156 => 'TransferRange',
0x157 => 'ClipPath', #3
0x158 => 'XClipPathUnits', #3
0x159 => 'YClipPathUnits', #3
0x15a => { #3
Name => 'Indexed',
PrintConv => { 0 => 'Not indexed', 1 => 'Indexed' },
},
0x15b => {
Name => 'JPEGTables',
Binary => 1,
},
0x15f => { #10
Name => 'OPIProxy',
PrintConv => {
0 => 'Higher resolution image does not exist',
1 => 'Higher resolution image exists',
},
},
# 0x181 => 'Decode', #20 (typo! - should be 0x1b1, ref 21)
# 0x182 => 'DefaultImageColor', #20 (typo! - should be 0x1b2, ref 21)
0x190 => { #3
Name => 'GlobalParametersIFD',
Groups => { 1 => 'GlobParamIFD' },
Flags => 'SubIFD',
SubDirectory => {
DirName => 'GlobParamIFD',
Start => '$val',
MaxSubdirs => 1,
},
},
0x191 => { #3
Name => 'ProfileType',
PrintConv => { 0 => 'Unspecified', 1 => 'Group 3 FAX' },
},
0x192 => { #3
Name => 'FaxProfile',
PrintConv => {
0 => 'Unknown',
1 => 'Minimal B&W lossless, S',
2 => 'Extended B&W lossless, F',
3 => 'Lossless JBIG B&W, J',
4 => 'Lossy color and grayscale, C',
5 => 'Lossless color and grayscale, L',
6 => 'Mixed raster content, M',
7 => 'Profile T', #20
255 => 'Multi Profiles', #20
},
},
0x193 => { #3
Name => 'CodingMethods',
PrintConv => { BITMASK => {
0 => 'Unspecified compression',
1 => 'Modified Huffman',
2 => 'Modified Read',
3 => 'Modified MR',
4 => 'JBIG',
5 => 'Baseline JPEG',
6 => 'JBIG color',
} },
},
0x194 => 'VersionYear', #3
0x195 => 'ModeNumber', #3
0x1b1 => 'Decode', #3
0x1b2 => 'DefaultImageColor', #3 (changed to ImageBaseColor, ref 21)
0x1b3 => 'T82Options', #20
0x1b5 => { #19
Name => 'JPEGTables',
Binary => 1,
},
0x200 => {
Name => 'JPEGProc',
PrintConv => {
1 => 'Baseline',
14 => 'Lossless',
},
},
0x201 => [
{
Name => 'ThumbnailOffset',
Notes => q{
ThumbnailOffset in IFD1 of JPEG and some TIFF-based images, IFD0 of MRW
images and AVI and MOV videos, and the SubIFD in IFD1 of SRW images;
PreviewImageStart in MakerNotes and IFD0 of ARW and SR2 images;
JpgFromRawStart in SubIFD of NEF images and IFD2 of PEF images; and
OtherImageStart in everything else
},
# thumbnail is found in IFD1 of JPEG and TIFF images, and
# IFD0 of EXIF information in FujiFilm AVI (RIFF) and MOV videos
Condition => q{
# recognize NRW file from a JPEG-compressed thumbnail in IFD0
if ($$self{TIFF_TYPE} eq 'NEF' and $$self{DIR_NAME} eq 'IFD0' and $$self{Compression} == 6) {
$self->OverrideFileType($$self{TIFF_TYPE} = 'NRW');
}
$$self{DIR_NAME} eq 'IFD1' or
($$self{DIR_NAME} eq 'IFD0' and $$self{FILE_TYPE} =~ /^(RIFF|MOV)$/)
},
IsOffset => 1,
OffsetPair => 0x202,
DataTag => 'ThumbnailImage',
Writable => 'int32u',
WriteGroup => 'IFD1',
# according to the EXIF spec. a JPEG-compressed thumbnail image may not
# be stored in a TIFF file, but these TIFF-based RAW image formats
# use IFD1 for a JPEG-compressed thumbnail: CR2, ARW, SR2 and PEF.
# (SRF also stores a JPEG image in IFD1, but it is actually a preview
# and we don't yet write SRF anyway)
WriteCondition => q{
$$self{FILE_TYPE} ne "TIFF" or
$$self{TIFF_TYPE} =~ /^(CR2|ARW|SR2|PEF)$/
},
Protected => 2,
},
{
Name => 'ThumbnailOffset',
# thumbnail in IFD0 of MRW images (Minolta A200)
# and IFD0 of NRW images (Nikon Coolpix P6000,P7000,P7100)
Condition => '$$self{DIR_NAME} eq "IFD0" and $$self{TIFF_TYPE} =~ /^(MRW|NRW)$/',
IsOffset => 1,
OffsetPair => 0x202,
# A200 uses the wrong base offset for this pointer!!
WrongBase => '$$self{Model} =~ /^DiMAGE A200/ ? $$self{MRW_WrongBase} : undef',
DataTag => 'ThumbnailImage',
Writable => 'int32u',
WriteGroup => 'IFD0',
Protected => 2,
Permanent => 1,
},
{
Name => 'ThumbnailOffset',
# in SubIFD of IFD1 in Samsung SRW images
Condition => q{
$$self{TIFF_TYPE} eq 'SRW' and $$self{DIR_NAME} eq 'SubIFD' and
$$self{PATH}[-2] eq 'IFD1'
},
IsOffset => 1,
OffsetPair => 0x202,
DataTag => 'ThumbnailImage',
Writable => 'int32u',
WriteGroup => 'SubIFD',
Protected => 2,
Permanent => 1,
},
{
Name => 'PreviewImageStart',
Condition => '$$self{DIR_NAME} eq "MakerNotes"',
IsOffset => 1,
OffsetPair => 0x202,
DataTag => 'PreviewImage',
Writable => 'int32u',
WriteGroup => 'MakerNotes',
Protected => 2,
Permanent => 1,
},
{
Name => 'PreviewImageStart',
# PreviewImage in IFD0 of ARW and SR2 files for all models
Condition => '$$self{DIR_NAME} eq "IFD0" and $$self{TIFF_TYPE} =~ /^(ARW|SR2)$/',
IsOffset => 1,
OffsetPair => 0x202,
DataTag => 'PreviewImage',
Writable => 'int32u',
WriteGroup => 'IFD0',
Protected => 2,
Permanent => 1,
},
{
Name => 'JpgFromRawStart',
Condition => '$$self{DIR_NAME} eq "SubIFD"',
IsOffset => 1,
OffsetPair => 0x202,
DataTag => 'JpgFromRaw',
Writable => 'int32u',
WriteGroup => 'SubIFD',
# JpgFromRaw is in SubIFD of NEF, NRW and SRW files
Protected => 2,
Permanent => 1,
},
{
Name => 'JpgFromRawStart',
Condition => '$$self{DIR_NAME} eq "IFD2"',
IsOffset => 1,
OffsetPair => 0x202,
DataTag => 'JpgFromRaw',
Writable => 'int32u',
WriteGroup => 'IFD2',
# JpgFromRaw is in IFD2 of PEF files
Protected => 2,
Permanent => 1,
},
{
Name => 'OtherImageStart',
Condition => '$$self{DIR_NAME} eq "SubIFD1"',
IsOffset => 1,
OffsetPair => 0x202,
DataTag => 'OtherImage',
Writable => 'int32u',
WriteGroup => 'SubIFD1',
Protected => 2,
Permanent => 1,
},
{
Name => 'OtherImageStart',
Condition => '$$self{DIR_NAME} eq "SubIFD2"',
IsOffset => 1,
OffsetPair => 0x202,
DataTag => 'OtherImage',
Writable => 'int32u',
WriteGroup => 'SubIFD2',
Protected => 2,
Permanent => 1,
},
{
Name => 'OtherImageStart',
IsOffset => 1,
OffsetPair => 0x202,
},
],
0x202 => [
{
Name => 'ThumbnailLength',
Notes => q{
ThumbnailLength in IFD1 of JPEG and some TIFF-based images, IFD0 of MRW
images and AVI and MOV videos, and the SubIFD in IFD1 of SRW images;
PreviewImageLength in MakerNotes and IFD0 of ARW and SR2 images;
JpgFromRawLength in SubIFD of NEF images, and IFD2 of PEF images; and
OtherImageLength in everything else
},
Condition => q{
$$self{DIR_NAME} eq 'IFD1' or
($$self{DIR_NAME} eq 'IFD0' and $$self{FILE_TYPE} =~ /^(RIFF|MOV)$/)
},
OffsetPair => 0x201,
DataTag => 'ThumbnailImage',
Writable => 'int32u',
WriteGroup => 'IFD1',
WriteCondition => q{
$$self{FILE_TYPE} ne "TIFF" or
$$self{TIFF_TYPE} =~ /^(CR2|ARW|SR2|PEF)$/
},
Protected => 2,
},
{
Name => 'ThumbnailLength',
# thumbnail in IFD0 of MRW images (Minolta A200)
# and IFD0 of NRW images (Nikon Coolpix P6000,P7000,P7100)
Condition => '$$self{DIR_NAME} eq "IFD0" and $$self{TIFF_TYPE} =~ /^(MRW|NRW)$/',
OffsetPair => 0x201,
DataTag => 'ThumbnailImage',
Writable => 'int32u',
WriteGroup => 'IFD0',
Protected => 2,
Permanent => 1,
},
{
Name => 'ThumbnailLength',
# in SubIFD of IFD1 in Samsung SRW images
Condition => q{
$$self{TIFF_TYPE} eq 'SRW' and $$self{DIR_NAME} eq 'SubIFD' and
$$self{PATH}[-2] eq 'IFD1'
},
OffsetPair => 0x201,
DataTag => 'ThumbnailImage',
Writable => 'int32u',
WriteGroup => 'SubIFD',
Protected => 2,
Permanent => 1,
},
{
Name => 'PreviewImageLength',
Condition => '$$self{DIR_NAME} eq "MakerNotes"',
OffsetPair => 0x201,
DataTag => 'PreviewImage',
Writable => 'int32u',
WriteGroup => 'MakerNotes',
Protected => 2,
Permanent => 1,
},
{
Name => 'PreviewImageLength',
# PreviewImage in IFD0 of ARW and SR2 files for all models
Condition => '$$self{DIR_NAME} eq "IFD0" and $$self{TIFF_TYPE} =~ /^(ARW|SR2)$/',
OffsetPair => 0x201,
DataTag => 'PreviewImage',
Writable => 'int32u',
WriteGroup => 'IFD0',
Protected => 2,
Permanent => 1,
},
{
Name => 'JpgFromRawLength',
Condition => '$$self{DIR_NAME} eq "SubIFD"',
OffsetPair => 0x201,
DataTag => 'JpgFromRaw',
Writable => 'int32u',
WriteGroup => 'SubIFD',
Protected => 2,
Permanent => 1,
},
{
Name => 'JpgFromRawLength',
Condition => '$$self{DIR_NAME} eq "IFD2"',
OffsetPair => 0x201,
DataTag => 'JpgFromRaw',
Writable => 'int32u',
WriteGroup => 'IFD2',
Protected => 2,
Permanent => 1,
},
{
Name => 'OtherImageLength',
Condition => '$$self{DIR_NAME} eq "SubIFD1"',
OffsetPair => 0x201,
DataTag => 'OtherImage',
Writable => 'int32u',
WriteGroup => 'SubIFD1',
Protected => 2,
Permanent => 1,
},
{
Name => 'OtherImageLength',
Condition => '$$self{DIR_NAME} eq "SubIFD2"',
OffsetPair => 0x201,
DataTag => 'OtherImage',
Writable => 'int32u',
WriteGroup => 'SubIFD2',
Protected => 2,
Permanent => 1,
},
{
Name => 'OtherImageLength',
OffsetPair => 0x201,
},
],
0x203 => 'JPEGRestartInterval',
0x205 => 'JPEGLosslessPredictors',
0x206 => 'JPEGPointTransforms',
0x207 => {
Name => 'JPEGQTables',
IsOffset => 1,
# this tag is not supported for writing, so define an
# invalid offset pair to cause a "No size tag" error to be
# generated if we try to write a file containing this tag
OffsetPair => -1,
},
0x208 => {
Name => 'JPEGDCTables',
IsOffset => 1,
OffsetPair => -1, # (see comment for JPEGQTables)
},
0x209 => {
Name => 'JPEGACTables',
IsOffset => 1,
OffsetPair => -1, # (see comment for JPEGQTables)
},
0x211 => {
Name => 'YCbCrCoefficients',
Protected => 1,
Writable => 'rational64u',
WriteGroup => 'IFD0',
Count => 3,
Priority => 0,
},
0x212 => {
Name => 'YCbCrSubSampling',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
Count => 2,
PrintConvColumns => 2,
PrintConv => \%Image::ExifTool::JPEG::yCbCrSubSampling,
Priority => 0,
},
0x213 => {
Name => 'YCbCrPositioning',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'IFD0',
Mandatory => 1,
PrintConv => {
1 => 'Centered',
2 => 'Co-sited',
},
Priority => 0,
},
0x214 => {
Name => 'ReferenceBlackWhite',
Writable => 'rational64u',
WriteGroup => 'IFD0',
Count => 6,
Priority => 0,
},
    # 0x220 - int32u: 0 (IFD0, Xiaomi Redmi models)
    # 0x221 - int32u: 0 (IFD0, Xiaomi Redmi models)
    # 0x222 - int32u: 0 (IFD0, Xiaomi Redmi models)
    # 0x223 - int32u: 0 (IFD0, Xiaomi Redmi models)
    # 0x224 - int32u: 0,1 (IFD0, Xiaomi Redmi models)
    # 0x225 - string: "" (IFD0, Xiaomi Redmi models)
0x22f => 'StripRowCounts',
0x2bc => {
Name => 'ApplicationNotes', # (writable directory!)
Format => 'undef',
Writable => 'int8u',
WriteGroup => 'IFD0', # (only for Validate)
Flags => [ 'Binary', 'Protected' ],
# this could be an XMP block
SubDirectory => {
DirName => 'XMP',
TagTable => 'Image::ExifTool::XMP::Main',
},
},
0x3e7 => 'USPTOMiscellaneous', #20
0x1000 => { #5
Name => 'RelatedImageFileFormat',
Protected => 1,
Writable => 'string',
WriteGroup => 'InteropIFD',
},
0x1001 => { #5
Name => 'RelatedImageWidth',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'InteropIFD',
},
0x1002 => { #5
Name => 'RelatedImageHeight',
Notes => 'called RelatedImageLength by the DCF spec.',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'InteropIFD',
},
# (0x474x tags written by MicrosoftPhoto)
0x4746 => { #PH
Name => 'Rating',
Writable => 'int16u',
WriteGroup => 'IFD0',
Avoid => 1,
},
0x4747 => { # (written by Digital Image Pro)
Name => 'XP_DIP_XML',
Format => 'undef',
# the following reference indicates this is Unicode:
# http://social.msdn.microsoft.com/Forums/en-US/isvvba/thread/ce6edcbb-8fc2-40c6-ad98-85f5d835ddfb
ValueConv => '$self->Decode($val,"UCS2","II")',
},
0x4748 => {
Name => 'StitchInfo',
SubDirectory => {
TagTable => 'Image::ExifTool::Microsoft::Stitch',
ByteOrder => 'LittleEndian', #PH (NC)
},
},
0x4749 => { #PH
Name => 'RatingPercent',
Writable => 'int16u',
WriteGroup => 'IFD0',
Avoid => 1,
},
0x7000 => { #JR
Name => 'SonyRawFileType',
# (only valid if Sony:FileFormat >= ARW 2.0, ref IB)
# Writable => 'int16u', (don't allow writes for now)
PrintConv => {
0 => 'Sony Uncompressed 14-bit RAW',
1 => 'Sony Uncompressed 12-bit RAW', #IB
2 => 'Sony Compressed RAW', # (lossy, ref IB)
3 => 'Sony Lossless Compressed RAW', #IB
4 => 'Sony Lossless Compressed RAW 2', #JR (ILCE-1)
},
},
# 0x7001 - int16u[1] (in SubIFD of Sony ARW images) - values: 0,1
0x7010 => { #IB
Name => 'SonyToneCurve',
# int16u[4] (in SubIFD of Sony ARW images -- don't allow writes for now)
# - only the middle 4 points are stored (lower comes from black level,
# and upper from data maximum)
},
# 0x7011 - int16u[4] (in SubIFD of Sony ARW images) - values: "0 4912 8212 12287","4000 7200 10050 12075"
# 0x7020 - int32u[1] (in SubIFD of Sony ARW images) - values: 0,3
0x7031 => {
Name => 'VignettingCorrection',
Notes => 'found in Sony ARW images',
Protected => 1,
Writable => 'int16s',
WriteGroup => 'SubIFD',
PrintConv => {
256 => 'Off',
257 => 'Auto',
272 => 'Auto (ILCE-1)', #JR
511 => 'No correction params available',
},
},
0x7032 => {
Name => 'VignettingCorrParams', #forum7640
Notes => 'found in Sony ARW images',
Protected => 1,
Writable => 'int16s',
WriteGroup => 'SubIFD',
Count => 17,
},
0x7034 => {
Name => 'ChromaticAberrationCorrection',
Notes => 'found in Sony ARW images',
Protected => 1,
Writable => 'int16s',
WriteGroup => 'SubIFD',
PrintConv => {
0 => 'Off',
1 => 'Auto',
255 => 'No correction params available',
},
},
0x7035 => {
Name => 'ChromaticAberrationCorrParams', #forum6509
Notes => 'found in Sony ARW images',
Protected => 1,
Writable => 'int16s',
WriteGroup => 'SubIFD',
Count => 33,
},
0x7036 => {
Name => 'DistortionCorrection',
Notes => 'found in Sony ARW images',
Protected => 1,
Writable => 'int16s',
WriteGroup => 'SubIFD',
PrintConv => {
0 => 'Off',
1 => 'Auto',
17 => 'Auto fixed by lens',
255 => 'No correction params available',
},
},
0x7037 => {
Name => 'DistortionCorrParams', #forum6509
Notes => 'found in Sony ARW images',
Protected => 1,
Writable => 'int16s',
WriteGroup => 'SubIFD',
Count => 17,
},
0x74c7 => { #IB (in ARW images from some Sony cameras)
Name => 'SonyCropTopLeft',
Writable => 'int32u',
WriteGroup => 'SubIFD',
Count => 2,
Permanent => 1,
Protected => 1,
},
0x74c8 => { #IB (in ARW images from some Sony cameras)
Name => 'SonyCropSize',
Writable => 'int32u',
WriteGroup => 'SubIFD',
Count => 2,
Permanent => 1,
Protected => 1,
},
0x800d => 'ImageID', #10
0x80a3 => { Name => 'WangTag1', Binary => 1 }, #20
0x80a4 => { Name => 'WangAnnotation', Binary => 1 },
0x80a5 => { Name => 'WangTag3', Binary => 1 }, #20
0x80a6 => { #20
Name => 'WangTag4',
PrintConv => 'length($val) <= 64 ? $val : \$val',
},
# tags 0x80b8-0x80bc are registered to Island Graphics
0x80b9 => 'ImageReferencePoints', #29
0x80ba => 'RegionXformTackPoint', #29
0x80bb => 'WarpQuadrilateral', #29
0x80bc => 'AffineTransformMat', #29
0x80e3 => 'Matteing', #9
0x80e4 => 'DataType', #9
0x80e5 => 'ImageDepth', #9
0x80e6 => 'TileDepth', #9
# tags 0x8214-0x8219 are registered to Pixar
0x8214 => 'ImageFullWidth', #29
0x8215 => 'ImageFullHeight', #29
0x8216 => 'TextureFormat', #29
0x8217 => 'WrapModes', #29
0x8218 => 'FovCot', #29
0x8219 => 'MatrixWorldToScreen', #29
0x821a => 'MatrixWorldToCamera', #29
0x827d => 'Model2', #29 (Eastman Kodak)
0x828d => { #12
Name => 'CFARepeatPatternDim',
Protected => 1,
Writable => 'int16u',
WriteGroup => 'SubIFD',
Count => 2,
},
0x828e => {
Name => 'CFAPattern2', #12
Format => 'int8u', # (written incorrectly as 'undef' in Nikon NRW images)
Protected => 1,
Writable => 'int8u',
WriteGroup => 'SubIFD',
Count => -1,
},
0x828f => { #12
Name => 'BatteryLevel',
Groups => { 2 => 'Camera' },
},
0x8290 => {
Name => 'KodakIFD',
Groups => { 1 => 'KodakIFD' },
Flags => 'SubIFD',
Notes => 'used in various types of Kodak images',
SubDirectory => {
TagTable => 'Image::ExifTool::Kodak::IFD',
DirName => 'KodakIFD',
Start => '$val',
MaxSubdirs => 1,
},
},
0x8298 => {
Name => 'Copyright',
Groups => { 2 => 'Author' },
Format => 'undef',
Writable => 'string',
WriteGroup => 'IFD0',
Notes => q{
may contain copyright notices for photographer and editor, separated by a
newline. As per the EXIF specification, the newline is replaced by a null
byte when writing to file, but this may be avoided by disabling the print
conversion
},
# internally the strings are separated by a null character in this format:
# Photographer only: photographer + NULL
# Both: photographer + NULL + editor + NULL
# Editor only: SPACE + NULL + editor + NULL
# (this is done as a RawConv so conditional replaces will work properly)
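        # e.g. a stored value of "photographer\0editor\0" reads back as
        # "photographer\neditor" after the conversion below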
RawConv => sub {
my ($val, $self) = @_;
$val =~ s/ *\0/\n/; # translate first NULL to a newline, removing trailing blanks
$val =~ s/ *\0.*//s; # truncate at second NULL and remove trailing blanks
$val =~ s/\n$//; # remove trailing newline if it exists
# decode if necessary (note: this is the only non-'string' EXIF value like this)
my $enc = $self->Options('CharsetEXIF');
$val = $self->Decode($val,$enc) if $enc;
return $val;
},
RawConvInv => '$val . "\0"',
PrintConvInv => sub {
my ($val, $self) = @_;
# encode if necessary (not automatic because Format is 'undef')
my $enc = $self->Options('CharsetEXIF');
$val = $self->Encode($val,$enc) if $enc and $val !~ /\0/;
if ($val =~ /(.*?)\s*[\n\r]+\s*(.*)/s) {
return $1 unless length $2;
# photographer copyright set to ' ' if it doesn't exist, according to spec.
return((length($1) ? $1 : ' ') . "\0" . $2);
}
return $val;
},
},
0x829a => {
Name => 'ExposureTime',
Writable => 'rational64u',
PrintConv => 'Image::ExifTool::Exif::PrintExposureTime($val)',
PrintConvInv => '$val',
},
0x829d => {
Name => 'FNumber',
Writable => 'rational64u',
PrintConv => 'Image::ExifTool::Exif::PrintFNumber($val)',
PrintConvInv => '$val',
},
0x82a5 => { #3
Name => 'MDFileTag',
Notes => 'tags 0x82a5-0x82ac are used in Molecular Dynamics GEL files',
},
0x82a6 => 'MDScalePixel', #3
0x82a7 => 'MDColorTable', #3
0x82a8 => 'MDLabName', #3
0x82a9 => 'MDSampleInfo', #3
0x82aa => 'MDPrepDate', #3
0x82ab => 'MDPrepTime', #3
0x82ac => 'MDFileUnits', #3
0x830e => { #30 (GeoTiff)
Name => 'PixelScale',
Writable => 'double',
WriteGroup => 'IFD0',
Count => 3,
},
0x8335 => 'AdventScale', #20
0x8336 => 'AdventRevision', #20
0x835c => 'UIC1Tag', #23
0x835d => 'UIC2Tag', #23
0x835e => 'UIC3Tag', #23
0x835f => 'UIC4Tag', #23
0x83bb => { #12
Name => 'IPTC-NAA', # (writable directory! -- but see note below)
# this should actually be written as 'undef' (see
# http://www.awaresystems.be/imaging/tiff/tifftags/iptc.html),
# but Photoshop writes it as int32u and Nikon Capture won't read
# anything else, so we do the same thing here... Doh!
Format => 'undef', # convert binary values as undef
Writable => 'int32u', # but write int32u format code in IFD
WriteGroup => 'IFD0',
Flags => [ 'Binary', 'Protected' ],
SubDirectory => {
DirName => 'IPTC',
TagTable => 'Image::ExifTool::IPTC::Main',
},
# Note: This directory may be written as a block via the IPTC-NAA tag,
# but this technique is not recommended. Instead, it is better to
# write the Extra IPTC tag and let ExifTool decide where it should go.
},
0x847e => 'IntergraphPacketData', #3
0x847f => 'IntergraphFlagRegisters', #3
0x8480 => { #30 (GeoTiff, obsolete)
Name => 'IntergraphMatrix',
Writable => 'double',
WriteGroup => 'IFD0',
Count => -1,
},
0x8481 => 'INGRReserved', #20
0x8482 => { #30 (GeoTiff)
Name => 'ModelTiePoint',
Groups => { 2 => 'Location' },
Writable => 'double',
WriteGroup => 'IFD0',
Count => -1,
},
0x84e0 => 'Site', #9
0x84e1 => 'ColorSequence', #9
0x84e2 => 'IT8Header', #9
0x84e3 => { #9
Name => 'RasterPadding',
PrintConv => { #20
0 => 'Byte',
1 => 'Word',
2 => 'Long Word',
9 => 'Sector',
10 => 'Long Sector',
},
},
0x84e4 => 'BitsPerRunLength', #9
0x84e5 => 'BitsPerExtendedRunLength', #9
0x84e6 => 'ColorTable', #9
0x84e7 => { #9
Name => 'ImageColorIndicator',
PrintConv => { #20
0 => 'Unspecified Image Color',
1 => 'Specified Image Color',
},
},
0x84e8 => { #9
Name => 'BackgroundColorIndicator',
PrintConv => { #20
0 => 'Unspecified Background Color',
1 => 'Specified Background Color',
},
},
0x84e9 => 'ImageColorValue', #9
0x84ea => 'BackgroundColorValue', #9
0x84eb => 'PixelIntensityRange', #9
0x84ec => 'TransparencyIndicator', #9
0x84ed => 'ColorCharacterization', #9
0x84ee => { #9
Name => 'HCUsage',
PrintConv => { #20
0 => 'CT',
1 => 'Line Art',
2 => 'Trap',
},
},
0x84ef => 'TrapIndicator', #17
0x84f0 => 'CMYKEquivalent', #17
0x8546 => { #11
Name => 'SEMInfo',
Notes => 'found in some scanning electron microscope images',
Writable => 'string',
WriteGroup => 'IFD0',
},
0x8568 => {
Name => 'AFCP_IPTC',
SubDirectory => {
# must change directory name so we don't create this directory
DirName => 'AFCP_IPTC',
TagTable => 'Image::ExifTool::IPTC::Main',
},
},
0x85b8 => 'PixelMagicJBIGOptions', #20
0x85d7 => 'JPLCartoIFD', #exifprobe (NC)
0x85d8 => { #30 (GeoTiff)
Name => 'ModelTransform',
Groups => { 2 => 'Location' },
Writable => 'double',
WriteGroup => 'IFD0',
Count => 16,
},
0x8602 => { #16
Name => 'WB_GRGBLevels',
Notes => 'found in IFD0 of Leaf MOS images',
},
# 0x8603 - Leaf CatchLight color matrix (ref 16)
0x8606 => {
Name => 'LeafData',
Format => 'undef', # avoid converting huge block to string of int8u's!
SubDirectory => {
DirName => 'LeafIFD',
TagTable => 'Image::ExifTool::Leaf::Main',
},
},
0x8649 => { #19
Name => 'PhotoshopSettings',
Format => 'binary',
WriteGroup => 'IFD0', # (only for Validate)
SubDirectory => {
DirName => 'Photoshop',
TagTable => 'Image::ExifTool::Photoshop::Main',
},
},
0x8769 => {
Name => 'ExifOffset',
Groups => { 1 => 'ExifIFD' },
WriteGroup => 'IFD0', # (only for Validate)
SubIFD => 2,
SubDirectory => {
DirName => 'ExifIFD',
Start => '$val',
},
},
0x8773 => {
Name => 'ICC_Profile',
WriteGroup => 'IFD0', # (only for Validate)
SubDirectory => {
TagTable => 'Image::ExifTool::ICC_Profile::Main',
},
},
0x877f => { #20
Name => 'TIFF_FXExtensions',
PrintConv => { BITMASK => {
0 => 'Resolution/Image Width',
1 => 'N Layer Profile M',
2 => 'Shared Data',
3 => 'B&W JBIG2',
4 => 'JBIG2 Profile M',
}},
},
0x8780 => { #20
Name => 'MultiProfiles',
PrintConv => { BITMASK => {
0 => 'Profile S',
1 => 'Profile F',
2 => 'Profile J',
3 => 'Profile C',
4 => 'Profile L',
5 => 'Profile M',
6 => 'Profile T',
7 => 'Resolution/Image Width',
8 => 'N Layer Profile M',
9 => 'Shared Data',
10 => 'JBIG2 Profile M',
}},
},
0x8781 => { #22
Name => 'SharedData',
IsOffset => 1,
# this tag is not supported for writing, so define an
# invalid offset pair to cause a "No size tag" error to be
# generated if we try to write a file containing this tag
OffsetPair => -1,
},
0x8782 => 'T88Options', #20
0x87ac => 'ImageLayer',
0x87af => { #30
Name => 'GeoTiffDirectory',
Format => 'undef',
Writable => 'int16u',
Notes => q{
            these "GeoTiff" tags may be read and written as a block, but they aren't
extracted unless specifically requested. Byte order changes are handled
automatically when copying between TIFF images with different byte order
},
WriteGroup => 'IFD0',
Binary => 1,
RawConv => '$val . GetByteOrder()', # save byte order
# swap byte order if necessary
RawConvInv => q{
return $val if length $val < 2;
my $order = substr($val, -2);
return $val unless $order eq 'II' or $order eq 'MM';
$val = substr($val, 0, -2);
return $val if $order eq GetByteOrder();
return pack('v*',unpack('n*',$val));
},
},
0x87b0 => { #30
Name => 'GeoTiffDoubleParams',
Format => 'undef',
Writable => 'double',
WriteGroup => 'IFD0',
Binary => 1,
RawConv => '$val . GetByteOrder()', # save byte order
# swap byte order if necessary
RawConvInv => q{
return $val if length $val < 2;
my $order = substr($val, -2);
return $val unless $order eq 'II' or $order eq 'MM';
$val = substr($val, 0, -2);
return $val if $order eq GetByteOrder();
$val =~ s/(.{4})(.{4})/$2$1/sg; # swap words
return pack('V*',unpack('N*',$val));
},
},
0x87b1 => { #30
Name => 'GeoTiffAsciiParams',
Format => 'undef',
Writable => 'string',
WriteGroup => 'IFD0',
Binary => 1,
},
0x87be => 'JBIGOptions', #29
0x8822 => {
Name => 'ExposureProgram',
Groups => { 2 => 'Camera' },
Notes => 'the value of 9 is not standard EXIF, but is used by the Canon EOS 7D',
Writable => 'int16u',
PrintConv => {
0 => 'Not Defined',
1 => 'Manual',
2 => 'Program AE',
3 => 'Aperture-priority AE',
4 => 'Shutter speed priority AE',
5 => 'Creative (Slow speed)',
6 => 'Action (High speed)',
7 => 'Portrait',
8 => 'Landscape',
9 => 'Bulb', #25
},
},
0x8824 => {
Name => 'SpectralSensitivity',
Groups => { 2 => 'Camera' },
Writable => 'string',
},
0x8825 => {
Name => 'GPSInfo',
Groups => { 1 => 'GPS' },
WriteGroup => 'IFD0', # (only for Validate)
Flags => 'SubIFD',
SubDirectory => {
DirName => 'GPS',
TagTable => 'Image::ExifTool::GPS::Main',
Start => '$val',
MaxSubdirs => 1,
},
},
0x8827 => {
Name => 'ISO',
Notes => q{
called ISOSpeedRatings by EXIF 2.2, then PhotographicSensitivity by the EXIF
2.3 spec.
},
Writable => 'int16u',
Count => -1,
PrintConv => '$val=~s/\s+/, /g; $val',
PrintConvInv => '$val=~tr/,//d; $val',
},
0x8828 => {
Name => 'Opto-ElectricConvFactor',
Notes => 'called OECF by the EXIF spec.',
Binary => 1,
},
0x8829 => 'Interlace', #12
0x882a => { #12
Name => 'TimeZoneOffset',
Writable => 'int16s',
Count => -1, # can be 1 or 2
Notes => q{
1 or 2 values: 1. The time zone offset of DateTimeOriginal from GMT in
hours, 2. If present, the time zone offset of ModifyDate
},
},
0x882b => { #12
Name => 'SelfTimerMode',
Writable => 'int16u',
},
0x8830 => { #24
Name => 'SensitivityType',
Notes => 'applies to EXIF:ISO tag',
Writable => 'int16u',
PrintConv => {
0 => 'Unknown',
1 => 'Standard Output Sensitivity',
2 => 'Recommended Exposure Index',
3 => 'ISO Speed',
4 => 'Standard Output Sensitivity and Recommended Exposure Index',
5 => 'Standard Output Sensitivity and ISO Speed',
6 => 'Recommended Exposure Index and ISO Speed',
7 => 'Standard Output Sensitivity, Recommended Exposure Index and ISO Speed',
},
},
0x8831 => { #24
Name => 'StandardOutputSensitivity',
Writable => 'int32u',
},
0x8832 => { #24
Name => 'RecommendedExposureIndex',
Writable => 'int32u',
},
0x8833 => { #24
Name => 'ISOSpeed',
Writable => 'int32u',
},
0x8834 => { #24
Name => 'ISOSpeedLatitudeyyy',
Description => 'ISO Speed Latitude yyy',
Writable => 'int32u',
},
0x8835 => { #24
Name => 'ISOSpeedLatitudezzz',
Description => 'ISO Speed Latitude zzz',
Writable => 'int32u',
},
0x885c => 'FaxRecvParams', #9
0x885d => 'FaxSubAddress', #9
0x885e => 'FaxRecvTime', #9
0x8871 => 'FedexEDR', #exifprobe (NC)
# 0x8889 - string: "portrait" (ExifIFD, Xiaomi POCO F1)
0x888a => { #PH
Name => 'LeafSubIFD',
Format => 'int32u', # Leaf incorrectly uses 'undef' format!
Groups => { 1 => 'LeafSubIFD' },
Flags => 'SubIFD',
SubDirectory => {
TagTable => 'Image::ExifTool::Leaf::SubIFD',
Start => '$val',
},
},
# 0x8891 - int16u: 35 (ExifIFD, Xiaomi POCO F1)
# 0x8894 - int16u: 0 (ExifIFD, Xiaomi POCO F1)
# 0x8895 - int16u: 0 (ExifIFD, Xiaomi POCO F1)
# 0x889a - int16u: 0 (ExifIFD, Xiaomi POCO F1)
# 0x89ab - seen "11 100 130 16 0 0 0 0" in IFD0 of TIFF image from IR scanner (forum8470)
0x9000 => {
Name => 'ExifVersion',
Writable => 'undef',
Mandatory => 1,
RawConv => '$val=~s/\0+$//; $val', # (some idiots add null terminators)
# (allow strings like "2.31" when writing)
PrintConvInv => '$val=~tr/.//d; $val=~/^\d{4}$/ ? $val : $val =~ /^\d{3}$/ ? "0$val" : undef',
},
0x9003 => {
Name => 'DateTimeOriginal',
Description => 'Date/Time Original',
Groups => { 2 => 'Time' },
Notes => 'date/time when original image was taken',
Writable => 'string',
Shift => 'Time',
Validate => 'ValidateExifDate($val)',
PrintConv => '$self->ConvertDateTime($val)',
PrintConvInv => '$self->InverseDateTime($val,0)',
},
0x9004 => {
Name => 'CreateDate',
Groups => { 2 => 'Time' },
Notes => 'called DateTimeDigitized by the EXIF spec.',
Writable => 'string',
Shift => 'Time',
Validate => 'ValidateExifDate($val)',
PrintConv => '$self->ConvertDateTime($val)',
PrintConvInv => '$self->InverseDateTime($val,0)',
},
0x9009 => { # undef[44] (or undef[11]) written by Google Plus uploader - PH
Name => 'GooglePlusUploadCode',
Format => 'int8u',
Writable => 'undef',
Count => -1,
},
0x9010 => {
Name => 'OffsetTime',
Groups => { 2 => 'Time' },
Notes => 'time zone for ModifyDate',
Writable => 'string',
PrintConvInv => q{
return "+00:00" if $val =~ /\d{2}Z$/;
return sprintf("%s%.2d:%.2d",$1,$2,$3) if $val =~ /([-+])(\d{1,2}):(\d{2})/;
return undef;
},
},
0x9011 => {
Name => 'OffsetTimeOriginal',
Groups => { 2 => 'Time' },
Notes => 'time zone for DateTimeOriginal',
Writable => 'string',
PrintConvInv => q{
return "+00:00" if $val =~ /\d{2}Z$/;
return sprintf("%s%.2d:%.2d",$1,$2,$3) if $val =~ /([-+])(\d{1,2}):(\d{2})/;
return undef;
},
},
0x9012 => {
Name => 'OffsetTimeDigitized',
Groups => { 2 => 'Time' },
Notes => 'time zone for CreateDate',
Writable => 'string',
PrintConvInv => q{
return "+00:00" if $val =~ /\d{2}Z$/;
return sprintf("%s%.2d:%.2d",$1,$2,$3) if $val =~ /([-+])(\d{1,2}):(\d{2})/;
return undef;
},
},
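    # (0x9101 below stores 4 component codes; e.g. a YCbCr image stores
    #  "1 2 3 0", which prints as "Y, Cb, Cr, -" via the table that follows)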
0x9101 => {
Name => 'ComponentsConfiguration',
Format => 'int8u',
Protected => 1,
Writable => 'undef',
Count => 4,
Mandatory => 1,
ValueConvInv => '$val=~tr/,//d; $val', # (so we can copy from XMP with -n)
PrintConvColumns => 2,
PrintConv => {
0 => '-',
1 => 'Y',
2 => 'Cb',
3 => 'Cr',
4 => 'R',
5 => 'G',
6 => 'B',
OTHER => sub {
my ($val, $inv, $conv) = @_;
my @a = split /,?\s+/, $val;
if ($inv) {
my %invConv;
$invConv{lc $$conv{$_}} = $_ foreach keys %$conv;
# strings like "YCbCr" and "RGB" still work for writing
@a = $a[0] =~ /(Y|Cb|Cr|R|G|B)/g if @a == 1;
foreach (@a) {
$_ = $invConv{lc $_};
return undef unless defined $_;
}
push @a, 0 while @a < 4;
} else {
foreach (@a) {
$_ = $$conv{$_} || "Err ($_)";
}
}
return join ', ', @a;
},
},
},
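    # (worked example of the conversion above: a stored value of "1 2 3 0" prints
    #  as "Y, Cb, Cr, -", and writing the string "RGB" is expanded by the OTHER
    #  handler to "4 5 6 0")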
0x9102 => {
Name => 'CompressedBitsPerPixel',
Protected => 1,
Writable => 'rational64u',
},
# 0x9103 - int16u: 1 (found in Pentax XG-1 samples)
0x9201 => {
Name => 'ShutterSpeedValue',
Notes => 'displayed in seconds, but stored as an APEX value',
Format => 'rational64s', # Leica M8 patch (incorrectly written as rational64u)
Writable => 'rational64s',
ValueConv => 'IsFloat($val) && abs($val)<100 ? 2**(-$val) : 0',
ValueConvInv => '$val>0 ? -log($val)/log(2) : -100',
PrintConv => 'Image::ExifTool::Exif::PrintExposureTime($val)',
PrintConvInv => 'Image::ExifTool::Exif::ConvertFraction($val)',
},
0x9202 => {
Name => 'ApertureValue',
Notes => 'displayed as an F number, but stored as an APEX value',
Writable => 'rational64u',
ValueConv => '2 ** ($val / 2)',
ValueConvInv => '$val>0 ? 2*log($val)/log(2) : 0',
PrintConv => 'sprintf("%.1f",$val)',
PrintConvInv => '$val',
},
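    # (worked examples of the APEX conversions above: a ShutterSpeedValue of 8
    #  reads as 2**-8 = 1/256 sec, and an ApertureValue of 4 reads as
    #  2**(4/2) = f/4.0; the inverse conversions recover the APEX values)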
# Wikipedia: BrightnessValue = Bv = Av + Tv - Sv
# ExifTool: LightValue = LV = Av + Tv - Sv + 5 (5 is the Sv for ISO 100 in Exif usage)
0x9203 => {
Name => 'BrightnessValue',
Writable => 'rational64s',
},
0x9204 => {
Name => 'ExposureCompensation',
Format => 'rational64s', # Leica M8 patch (incorrectly written as rational64u)
Notes => 'called ExposureBiasValue by the EXIF spec.',
Writable => 'rational64s',
PrintConv => 'Image::ExifTool::Exif::PrintFraction($val)',
PrintConvInv => '$val',
},
0x9205 => {
Name => 'MaxApertureValue',
Notes => 'displayed as an F number, but stored as an APEX value',
Groups => { 2 => 'Camera' },
Writable => 'rational64u',
ValueConv => '2 ** ($val / 2)',
ValueConvInv => '$val>0 ? 2*log($val)/log(2) : 0',
PrintConv => 'sprintf("%.1f",$val)',
PrintConvInv => '$val',
},
0x9206 => {
Name => 'SubjectDistance',
Groups => { 2 => 'Camera' },
Writable => 'rational64u',
PrintConv => '$val =~ /^(inf|undef)$/ ? $val : "${val} m"',
PrintConvInv => '$val=~s/\s*m$//;$val',
},
0x9207 => {
Name => 'MeteringMode',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
PrintConv => {
0 => 'Unknown',
1 => 'Average',
2 => 'Center-weighted average',
3 => 'Spot',
4 => 'Multi-spot',
5 => 'Multi-segment',
6 => 'Partial',
255 => 'Other',
},
},
0x9208 => {
Name => 'LightSource',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
SeparateTable => 'LightSource',
PrintConv => \%lightSource,
},
0x9209 => {
Name => 'Flash',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
Flags => 'PrintHex',
SeparateTable => 'Flash',
PrintConv => \%flash,
},
0x920a => {
Name => 'FocalLength',
Groups => { 2 => 'Camera' },
Writable => 'rational64u',
PrintConv => 'sprintf("%.1f mm",$val)',
PrintConvInv => '$val=~s/\s*mm$//;$val',
},
# Note: tags 0x920b-0x9217 are duplicates of 0xa20b-0xa217
# (The EXIF standard uses 0xa2xx, but you'll find both in images)
0x920b => { #12
Name => 'FlashEnergy',
Groups => { 2 => 'Camera' },
},
0x920c => 'SpatialFrequencyResponse', #12 (not in Fuji images - PH)
0x920d => 'Noise', #12
0x920e => 'FocalPlaneXResolution', #12
0x920f => 'FocalPlaneYResolution', #12
0x9210 => { #12
Name => 'FocalPlaneResolutionUnit',
Groups => { 2 => 'Camera' },
PrintConv => {
1 => 'None',
2 => 'inches',
3 => 'cm',
4 => 'mm',
5 => 'um',
},
},
0x9211 => { #12
Name => 'ImageNumber',
Writable => 'int32u',
},
0x9212 => { #12
Name => 'SecurityClassification',
Writable => 'string',
PrintConv => {
T => 'Top Secret',
S => 'Secret',
C => 'Confidential',
R => 'Restricted',
U => 'Unclassified',
},
},
0x9213 => { #12
Name => 'ImageHistory',
Writable => 'string',
},
0x9214 => {
Name => 'SubjectArea',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
Count => -1, # 2, 3 or 4 values
},
0x9215 => 'ExposureIndex', #12
0x9216 => 'TIFF-EPStandardID', #12
0x9217 => { #12
Name => 'SensingMethod',
Groups => { 2 => 'Camera' },
PrintConv => {
# (values 1 and 6 are not used by corresponding EXIF tag 0xa217)
1 => 'Monochrome area',
2 => 'One-chip color area',
3 => 'Two-chip color area',
4 => 'Three-chip color area',
5 => 'Color sequential area',
6 => 'Monochrome linear',
7 => 'Trilinear',
8 => 'Color sequential linear',
},
},
0x923a => 'CIP3DataFile', #20
0x923b => 'CIP3Sheet', #20
0x923c => 'CIP3Side', #20
0x923f => 'StoNits', #9
# handle maker notes as a conditional list
0x927c => \@Image::ExifTool::MakerNotes::Main,
0x9286 => {
Name => 'UserComment',
# I have seen other applications write it incorrectly as 'string' or 'int8u'
Format => 'undef',
Writable => 'undef',
RawConv => 'Image::ExifTool::Exif::ConvertExifText($self,$val,1,$tag)',
# (starts with "ASCII\0\0\0", "UNICODE\0", "JIS\0\0\0\0\0" or "\0\0\0\0\0\0\0\0")
RawConvInv => 'Image::ExifTool::Exif::EncodeExifText($self,$val)',
# SHOULD ADD SPECIAL LOGIC TO ALLOW CONDITIONAL OVERWRITE OF
# "UNKNOWN" VALUES FILLED WITH SPACES
},
0x9290 => {
Name => 'SubSecTime',
Groups => { 2 => 'Time' },
Notes => 'fractional seconds for ModifyDate',
Writable => 'string',
ValueConv => '$val=~s/ +$//; $val', # trim trailing blanks
# extract fractional seconds from a full date/time value
ValueConvInv => '$val=~/^(\d+)\s*$/ ? $1 : ($val=~/\.(\d+)/ ? $1 : undef)',
},
0x9291 => {
Name => 'SubSecTimeOriginal',
Groups => { 2 => 'Time' },
Notes => 'fractional seconds for DateTimeOriginal',
Writable => 'string',
ValueConv => '$val=~s/ +$//; $val', # trim trailing blanks
ValueConvInv => '$val=~/^(\d+)\s*$/ ? $1 : ($val=~/\.(\d+)/ ? $1 : undef)',
},
0x9292 => {
Name => 'SubSecTimeDigitized',
Groups => { 2 => 'Time' },
Notes => 'fractional seconds for CreateDate',
Writable => 'string',
ValueConv => '$val=~s/ +$//; $val', # trim trailing blanks
ValueConvInv => '$val=~/^(\d+)\s*$/ ? $1 : ($val=~/\.(\d+)/ ? $1 : undef)',
},
# The following 3 tags are found in MSOffice TIFF images
# References:
# http://social.msdn.microsoft.com/Forums/en-US/os_standocs/thread/03086d55-294a-49d5-967a-5303d34c40f8/
# http://blogs.msdn.com/openspecification/archive/2009/12/08/details-of-three-tiff-tag-extensions-that-microsoft-office-document-imaging-modi-software-may-write-into-the-tiff-files-it-generates.aspx
# http://www.microsoft.com/downloads/details.aspx?FamilyID=0dbc435d-3544-4f4b-9092-2f2643d64a39&displaylang=en#filelist
0x932f => 'MSDocumentText',
0x9330 => {
Name => 'MSPropertySetStorage',
Binary => 1,
},
0x9331 => {
Name => 'MSDocumentTextPosition',
Binary => 1, # (just in case -- don't know what format this is)
},
0x935c => { #3/19
Name => 'ImageSourceData', # (writable directory!)
Writable => 'undef',
WriteGroup => 'IFD0',
SubDirectory => { TagTable => 'Image::ExifTool::Photoshop::DocumentData' },
Binary => 1,
Protected => 1, # (because this can be hundreds of megabytes)
ReadFromRAF => 1, # don't load into memory when reading
},
0x9400 => {
Name => 'AmbientTemperature',
Notes => 'ambient temperature in degrees C, called Temperature by the EXIF spec.',
Writable => 'rational64s',
PrintConv => '"$val C"',
PrintConvInv => '$val=~s/ ?C//; $val',
},
0x9401 => {
Name => 'Humidity',
Notes => 'ambient relative humidity in percent',
Writable => 'rational64u',
},
0x9402 => {
Name => 'Pressure',
Notes => 'air pressure in hPa or mbar',
Writable => 'rational64u',
},
0x9403 => {
Name => 'WaterDepth',
Notes => 'depth under water in metres, negative for above water',
Writable => 'rational64s',
},
0x9404 => {
Name => 'Acceleration',
Notes => 'directionless camera acceleration in units of mGal, or 10-5 m/s2',
Writable => 'rational64u',
},
0x9405 => {
Name => 'CameraElevationAngle',
Writable => 'rational64s',
},
# 0x9999 - string: camera settings (ExifIFD, Xiaomi POCO F1)
# 0x9aaa - int8u[2176]: ? (ExifIFD, Xiaomi POCO F1)
0x9c9b => {
Name => 'XPTitle',
Format => 'undef',
Writable => 'int8u',
WriteGroup => 'IFD0',
Notes => q{
tags 0x9c9b-0x9c9f are used by Windows Explorer; special characters
in these values are converted to UTF-8 by default, or Windows Latin1
with the -L option. XPTitle is ignored by Windows Explorer if
ImageDescription exists
},
ValueConv => '$self->Decode($val,"UCS2","II")',
ValueConvInv => '$self->Encode($val,"UCS2","II") . "\0\0"',
},
0x9c9c => {
Name => 'XPComment',
Format => 'undef',
Writable => 'int8u',
WriteGroup => 'IFD0',
ValueConv => '$self->Decode($val,"UCS2","II")',
ValueConvInv => '$self->Encode($val,"UCS2","II") . "\0\0"',
},
0x9c9d => {
Name => 'XPAuthor',
Groups => { 2 => 'Author' },
Format => 'undef',
Writable => 'int8u',
WriteGroup => 'IFD0',
Notes => 'ignored by Windows Explorer if Artist exists',
ValueConv => '$self->Decode($val,"UCS2","II")',
ValueConvInv => '$self->Encode($val,"UCS2","II") . "\0\0"',
},
0x9c9e => {
Name => 'XPKeywords',
Format => 'undef',
Writable => 'int8u',
WriteGroup => 'IFD0',
ValueConv => '$self->Decode($val,"UCS2","II")',
ValueConvInv => '$self->Encode($val,"UCS2","II") . "\0\0"',
},
0x9c9f => {
Name => 'XPSubject',
Format => 'undef',
Writable => 'int8u',
WriteGroup => 'IFD0',
ValueConv => '$self->Decode($val,"UCS2","II")',
ValueConvInv => '$self->Encode($val,"UCS2","II") . "\0\0"',
},
0xa000 => {
Name => 'FlashpixVersion',
Writable => 'undef',
Mandatory => 1,
RawConv => '$val=~s/\0+$//; $val', # (some idiots add null terminators)
PrintConvInv => '$val=~tr/.//d; $val=~/^\d{4}$/ ? $val : undef',
},
0xa001 => {
Name => 'ColorSpace',
Notes => q{
the value of 0x2 is not standard EXIF. Instead, an Adobe RGB image is
indicated by "Uncalibrated" with an InteropIndex of "R03". The values
0xfffd and 0xfffe are also non-standard, and are used by some Sony cameras
},
Writable => 'int16u',
Mandatory => 1,
PrintHex => 1,
PrintConv => {
1 => 'sRGB',
2 => 'Adobe RGB',
0xffff => 'Uncalibrated',
# Sony uses these definitions: (ref JD)
# 0xffff => 'Adobe RGB', (conflicts with Uncalibrated)
0xfffe => 'ICC Profile',
0xfffd => 'Wide Gamut RGB',
},
},
0xa002 => {
Name => 'ExifImageWidth',
Notes => 'called PixelXDimension by the EXIF spec.',
Writable => 'int16u',
Mandatory => 1,
},
0xa003 => {
Name => 'ExifImageHeight',
Notes => 'called PixelYDimension by the EXIF spec.',
Writable => 'int16u',
Mandatory => 1,
},
0xa004 => {
Name => 'RelatedSoundFile',
Writable => 'string',
},
0xa005 => {
Name => 'InteropOffset',
Groups => { 1 => 'InteropIFD' },
Flags => 'SubIFD',
Description => 'Interoperability Offset',
SubDirectory => {
DirName => 'InteropIFD',
Start => '$val',
MaxSubdirs => 1,
},
},
# the following 4 tags found in SubIFD1 of some Samsung SRW images
0xa010 => {
Name => 'SamsungRawPointersOffset',
IsOffset => 1,
OffsetPair => 0xa011, # point to associated byte count
},
0xa011 => {
Name => 'SamsungRawPointersLength',
OffsetPair => 0xa010, # point to associated offset
},
0xa101 => {
Name => 'SamsungRawByteOrder',
Format => 'undef',
# this is written incorrectly as string[1], but is "\0\0MM" or "II\0\0"
FixedSize => 4,
Count => 1,
},
0xa102 => {
Name => 'SamsungRawUnknown',
Unknown => 1,
},
0xa20b => {
Name => 'FlashEnergy',
Groups => { 2 => 'Camera' },
Writable => 'rational64u',
},
0xa20c => {
Name => 'SpatialFrequencyResponse',
PrintConv => 'Image::ExifTool::Exif::PrintSFR($val)',
},
0xa20d => 'Noise',
0xa20e => {
Name => 'FocalPlaneXResolution',
Groups => { 2 => 'Camera' },
Writable => 'rational64u',
},
0xa20f => {
Name => 'FocalPlaneYResolution',
Groups => { 2 => 'Camera' },
Writable => 'rational64u',
},
0xa210 => {
Name => 'FocalPlaneResolutionUnit',
Groups => { 2 => 'Camera' },
Notes => 'values 1, 4 and 5 are not standard EXIF',
Writable => 'int16u',
PrintConv => {
1 => 'None', # (not standard EXIF)
2 => 'inches',
3 => 'cm',
4 => 'mm', # (not standard EXIF)
5 => 'um', # (not standard EXIF)
},
},
0xa211 => 'ImageNumber',
0xa212 => 'SecurityClassification',
0xa213 => 'ImageHistory',
0xa214 => {
Name => 'SubjectLocation',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
Count => 2,
},
0xa215 => { Name => 'ExposureIndex', Writable => 'rational64u' },
0xa216 => 'TIFF-EPStandardID',
0xa217 => {
Name => 'SensingMethod',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
PrintConv => {
1 => 'Not defined',
2 => 'One-chip color area',
3 => 'Two-chip color area',
4 => 'Three-chip color area',
5 => 'Color sequential area',
7 => 'Trilinear',
8 => 'Color sequential linear',
# 15 - used by DJI XT2
},
},
0xa300 => {
Name => 'FileSource',
Writable => 'undef',
ValueConvInv => '($val=~/^\d+$/ and $val < 256) ? chr($val) : $val',
PrintConv => {
1 => 'Film Scanner',
2 => 'Reflection Print Scanner',
3 => 'Digital Camera',
# handle the case where Sigma incorrectly gives this tag a count of 4
"\3\0\0\0" => 'Sigma Digital Camera',
},
},
0xa301 => {
Name => 'SceneType',
Writable => 'undef',
ValueConvInv => 'chr($val & 0xff)',
PrintConv => {
1 => 'Directly photographed',
},
},
0xa302 => {
Name => 'CFAPattern',
Writable => 'undef',
RawConv => 'Image::ExifTool::Exif::DecodeCFAPattern($self, $val)',
RawConvInv => q{
my @a = split ' ', $val;
return $val if @a <= 2; # also accept binary data for backward compatibility
return pack(GetByteOrder() eq 'II' ? 'v2C*' : 'n2C*', @a);
},
PrintConv => 'Image::ExifTool::Exif::PrintCFAPattern($val)',
PrintConvInv => 'Image::ExifTool::Exif::GetCFAPattern($val)',
},
0xa401 => {
Name => 'CustomRendered',
Writable => 'int16u',
Notes => q{
only 0 and 1 are standard EXIF, but other values are used by Apple iOS
devices
},
PrintConv => {
0 => 'Normal',
1 => 'Custom',
2 => 'HDR (no original saved)', #32 non-standard (Apple iOS)
3 => 'HDR (original saved)', #32 non-standard (Apple iOS)
4 => 'Original (for HDR)', #32 non-standard (Apple iOS)
6 => 'Panorama', # non-standard (Apple iOS, horizontal or vertical)
7 => 'Portrait HDR', #32 non-standard (Apple iOS)
8 => 'Portrait', # non-standard (Apple iOS, blurred background)
# 9 - also seen (Apple iOS) (HDR Portrait?)
},
},
0xa402 => {
Name => 'ExposureMode',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
PrintConv => {
0 => 'Auto',
1 => 'Manual',
2 => 'Auto bracket',
# have seen 3 from Samsung EX1, NX30, NX200 - PH
},
},
0xa403 => {
Name => 'WhiteBalance',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
# set Priority to zero to keep this WhiteBalance from overriding the
        # MakerNotes WhiteBalance, since the MakerNotes WhiteBalance is more
# accurate and contains more information (if it exists)
Priority => 0,
PrintConv => {
0 => 'Auto',
1 => 'Manual',
},
},
0xa404 => {
Name => 'DigitalZoomRatio',
Groups => { 2 => 'Camera' },
Writable => 'rational64u',
},
0xa405 => {
Name => 'FocalLengthIn35mmFormat',
Notes => 'called FocalLengthIn35mmFilm by the EXIF spec.',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
PrintConv => '"$val mm"',
PrintConvInv => '$val=~s/\s*mm$//;$val',
},
0xa406 => {
Name => 'SceneCaptureType',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
Notes => 'the value of 4 is non-standard, and used by some Samsung models',
PrintConv => {
0 => 'Standard',
1 => 'Landscape',
2 => 'Portrait',
3 => 'Night',
4 => 'Other', # (non-standard Samsung, ref forum 5724)
},
},
0xa407 => {
Name => 'GainControl',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
PrintConv => {
0 => 'None',
1 => 'Low gain up',
2 => 'High gain up',
3 => 'Low gain down',
4 => 'High gain down',
},
},
0xa408 => {
Name => 'Contrast',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
PrintConv => {
0 => 'Normal',
1 => 'Low',
2 => 'High',
},
PrintConvInv => 'Image::ExifTool::Exif::ConvertParameter($val)',
},
0xa409 => {
Name => 'Saturation',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
PrintConv => {
0 => 'Normal',
1 => 'Low',
2 => 'High',
},
PrintConvInv => 'Image::ExifTool::Exif::ConvertParameter($val)',
},
0xa40a => {
Name => 'Sharpness',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
PrintConv => {
0 => 'Normal',
1 => 'Soft',
2 => 'Hard',
},
PrintConvInv => 'Image::ExifTool::Exif::ConvertParameter($val)',
},
0xa40b => {
Name => 'DeviceSettingDescription',
Groups => { 2 => 'Camera' },
Binary => 1,
},
0xa40c => {
Name => 'SubjectDistanceRange',
Groups => { 2 => 'Camera' },
Writable => 'int16u',
PrintConv => {
0 => 'Unknown',
1 => 'Macro',
2 => 'Close',
3 => 'Distant',
},
},
# 0xa40d - int16u: 0 (GE E1486 TW)
# 0xa40e - int16u: 1 (GE E1486 TW)
0xa420 => { Name => 'ImageUniqueID', Writable => 'string' },
0xa430 => { #24
Name => 'OwnerName',
Notes => 'called CameraOwnerName by the EXIF spec.',
Writable => 'string',
},
0xa431 => { #24
Name => 'SerialNumber',
Notes => 'called BodySerialNumber by the EXIF spec.',
Writable => 'string',
},
0xa432 => { #24
Name => 'LensInfo',
Notes => q{
4 rational values giving focal and aperture ranges, called LensSpecification
by the EXIF spec.
},
Writable => 'rational64u',
Count => 4,
# convert to the form "12-20mm f/3.8-4.5" or "50mm f/1.4"
PrintConv => \&PrintLensInfo,
PrintConvInv => \&ConvertLensInfo,
},
0xa433 => { Name => 'LensMake', Writable => 'string' }, #24
0xa434 => { Name => 'LensModel', Writable => 'string' }, #24
0xa435 => { Name => 'LensSerialNumber', Writable => 'string' }, #24
0xa460 => { #Exif2.32
Name => 'CompositeImage',
Writable => 'int16u',
PrintConv => {
0 => 'Unknown',
1 => 'Not a Composite Image',
2 => 'General Composite Image',
3 => 'Composite Image Captured While Shooting',
},
},
0xa461 => { #Exif2.32
Name => 'CompositeImageCount',
Notes => q{
2 values: 1. Number of source images, 2. Number of images used. Called
SourceImageNumberOfCompositeImage by the EXIF spec.
},
Writable => 'int16u',
Count => 2,
},
0xa462 => { #Exif2.32
Name => 'CompositeImageExposureTimes',
Notes => q{
11 or more values: 1. Total exposure time period, 2. Total exposure of all
source images, 3. Total exposure of all used images, 4. Max exposure time of
source images, 5. Max exposure time of used images, 6. Min exposure time of
            source images, 7. Min exposure time of used images, 8. Number of sequences, 9.
Number of source images in sequence. 10-N. Exposure times of each source
image. Called SourceExposureTimesOfCompositeImage by the EXIF spec.
},
Writable => 'undef',
RawConv => sub {
my $val = shift;
my @v;
my $i = 0;
for (;;) {
if ($i == 56 or $i == 58) {
last if $i + 2 > length $val;
push @v, Get16u(\$val, $i);
$i += 2;
} else {
last if $i + 8 > length $val;
push @v, Image::ExifTool::GetRational64u(\$val, $i);
$i += 8;
}
}
return join ' ', @v;
},
RawConvInv => sub {
my $val = shift;
my @v = split ' ', $val;
my $i;
for ($i=0; ; ++$i) {
last unless defined $v[$i];
$v[$i] = ($i == 7 or $i == 8) ? Set16u($v[$i]) : Image::ExifTool::SetRational64u($v[$i]);
}
return join '', @v;
},
PrintConv => sub {
my $val = shift;
my @v = split ' ', $val;
my $i;
for ($i=0; ; ++$i) {
last unless defined $v[$i];
$v[$i] = PrintExposureTime($v[$i]) unless $i == 7 or $i == 8;
}
return join ' ', @v;
},
PrintConvInv => '$val',
},
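    # (the RawConv/RawConvInv above assume the fixed layout of this tag: seven
    #  rational64u values occupy bytes 0-55, the two int16u counts sit at byte
    #  offsets 56 and 58, and the per-image exposure times follow as rational64u)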
0xa480 => { Name => 'GDALMetadata', Writable => 'string', WriteGroup => 'IFD0' }, #3
0xa481 => { Name => 'GDALNoData', Writable => 'string', WriteGroup => 'IFD0' }, #3
0xa500 => { Name => 'Gamma', Writable => 'rational64u' },
0xafc0 => 'ExpandSoftware', #JD (Opanda)
0xafc1 => 'ExpandLens', #JD (Opanda)
0xafc2 => 'ExpandFilm', #JD (Opanda)
0xafc3 => 'ExpandFilterLens', #JD (Opanda)
0xafc4 => 'ExpandScanner', #JD (Opanda)
0xafc5 => 'ExpandFlashLamp', #JD (Opanda)
0xb4c3 => { Name => 'HasselbladRawImage', Format => 'undef', Binary => 1 }, #IB
#
# Windows Media Photo / HD Photo (WDP/HDP) tags
#
0xbc01 => { #13
Name => 'PixelFormat',
PrintHex => 1,
Format => 'undef',
Notes => q{
tags 0xbc** are used in Windows HD Photo (HDP and WDP) images. The actual
            PixelFormat values are 16-byte GUIDs, but the leading 15 bytes,
            '6fddc324-4e03-4bfe-b185-3d77768dc9', have been removed below to avoid
unnecessary clutter
},
ValueConv => q{
require Image::ExifTool::ASF;
$val = Image::ExifTool::ASF::GetGUID($val);
# GUID's are too long, so remove redundant information
$val =~ s/^6fddc324-4e03-4bfe-b185-3d77768dc9//i and $val = hex($val);
return $val;
},
PrintConv => {
0x0d => '24-bit RGB',
0x0c => '24-bit BGR',
0x0e => '32-bit BGR',
0x15 => '48-bit RGB',
0x12 => '48-bit RGB Fixed Point',
0x3b => '48-bit RGB Half',
0x18 => '96-bit RGB Fixed Point',
0x1b => '128-bit RGB Float',
0x0f => '32-bit BGRA',
0x16 => '64-bit RGBA',
0x1d => '64-bit RGBA Fixed Point',
0x3a => '64-bit RGBA Half',
0x1e => '128-bit RGBA Fixed Point',
0x19 => '128-bit RGBA Float',
0x10 => '32-bit PBGRA',
0x17 => '64-bit PRGBA',
0x1a => '128-bit PRGBA Float',
0x1c => '32-bit CMYK',
0x2c => '40-bit CMYK Alpha',
0x1f => '64-bit CMYK',
0x2d => '80-bit CMYK Alpha',
0x20 => '24-bit 3 Channels',
0x21 => '32-bit 4 Channels',
0x22 => '40-bit 5 Channels',
0x23 => '48-bit 6 Channels',
0x24 => '56-bit 7 Channels',
0x25 => '64-bit 8 Channels',
0x2e => '32-bit 3 Channels Alpha',
0x2f => '40-bit 4 Channels Alpha',
0x30 => '48-bit 5 Channels Alpha',
0x31 => '56-bit 6 Channels Alpha',
0x32 => '64-bit 7 Channels Alpha',
0x33 => '72-bit 8 Channels Alpha',
0x26 => '48-bit 3 Channels',
0x27 => '64-bit 4 Channels',
0x28 => '80-bit 5 Channels',
0x29 => '96-bit 6 Channels',
0x2a => '112-bit 7 Channels',
0x2b => '128-bit 8 Channels',
0x34 => '64-bit 3 Channels Alpha',
0x35 => '80-bit 4 Channels Alpha',
0x36 => '96-bit 5 Channels Alpha',
0x37 => '112-bit 6 Channels Alpha',
0x38 => '128-bit 7 Channels Alpha',
0x39 => '144-bit 8 Channels Alpha',
0x08 => '8-bit Gray',
0x0b => '16-bit Gray',
0x13 => '16-bit Gray Fixed Point',
0x3e => '16-bit Gray Half',
0x3f => '32-bit Gray Fixed Point',
0x11 => '32-bit Gray Float',
0x05 => 'Black & White',
0x09 => '16-bit BGR555',
0x0a => '16-bit BGR565',
            0x14 => '32-bit BGR101010', # (0x13 would collide with 16-bit Gray Fixed Point above)
0x3d => '32-bit RGBE',
},
},
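    # (example of the ValueConv above: the GUID 6fddc324-4e03-4bfe-b185-3d77768dc90d
    #  reduces to 0x0d, which prints as '24-bit RGB')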
0xbc02 => { #13
Name => 'Transformation',
PrintConv => {
0 => 'Horizontal (normal)',
1 => 'Mirror vertical',
2 => 'Mirror horizontal',
3 => 'Rotate 180',
4 => 'Rotate 90 CW',
5 => 'Mirror horizontal and rotate 90 CW',
6 => 'Mirror horizontal and rotate 270 CW',
7 => 'Rotate 270 CW',
},
},
0xbc03 => { #13
Name => 'Uncompressed',
PrintConv => { 0 => 'No', 1 => 'Yes' },
},
0xbc04 => { #13
Name => 'ImageType',
PrintConv => { BITMASK => {
0 => 'Preview',
1 => 'Page',
} },
},
0xbc80 => 'ImageWidth', #13
0xbc81 => 'ImageHeight', #13
0xbc82 => 'WidthResolution', #13
0xbc83 => 'HeightResolution', #13
0xbcc0 => { #13
Name => 'ImageOffset',
IsOffset => 1,
OffsetPair => 0xbcc1, # point to associated byte count
},
0xbcc1 => { #13
Name => 'ImageByteCount',
OffsetPair => 0xbcc0, # point to associated offset
},
0xbcc2 => { #13
Name => 'AlphaOffset',
IsOffset => 1,
OffsetPair => 0xbcc3, # point to associated byte count
},
0xbcc3 => { #13
Name => 'AlphaByteCount',
OffsetPair => 0xbcc2, # point to associated offset
},
0xbcc4 => { #13
Name => 'ImageDataDiscard',
PrintConv => {
0 => 'Full Resolution',
1 => 'Flexbits Discarded',
2 => 'HighPass Frequency Data Discarded',
3 => 'Highpass and LowPass Frequency Data Discarded',
},
},
0xbcc5 => { #13
Name => 'AlphaDataDiscard',
PrintConv => {
0 => 'Full Resolution',
1 => 'Flexbits Discarded',
2 => 'HighPass Frequency Data Discarded',
3 => 'Highpass and LowPass Frequency Data Discarded',
},
},
#
0xc427 => 'OceScanjobDesc', #3
0xc428 => 'OceApplicationSelector', #3
0xc429 => 'OceIDNumber', #3
0xc42a => 'OceImageLogic', #3
0xc44f => { Name => 'Annotations', Binary => 1 }, #7/19
0xc4a5 => {
Name => 'PrintIM', # (writable directory!)
# must set Writable here so this tag will be saved with MakerNotes option
Writable => 'undef',
WriteGroup => 'IFD0',
Binary => 1,
# (don't make Binary/Protected because we can't copy individual PrintIM tags anyway)
Description => 'Print Image Matching',
SubDirectory => {
TagTable => 'Image::ExifTool::PrintIM::Main',
},
PrintConvInv => '$val =~ /^PrintIM/ ? $val : undef', # quick validation
},
0xc51b => { # (Hasselblad H3D)
Name => 'HasselbladExif',
Format => 'undef',
RawConv => q{
$$self{DOC_NUM} = ++$$self{DOC_COUNT};
$self->ExtractInfo(\$val, { ReEntry => 1 });
$$self{DOC_NUM} = 0;
return undef;
},
},
0xc573 => { #PH
Name => 'OriginalFileName',
Notes => 'used by some obscure software', # (possibly Swizzy Photosmacker?)
# (it is a 'string', but obscure, so don't make it writable)
},
0xc580 => { #20
Name => 'USPTOOriginalContentType',
PrintConv => {
0 => 'Text or Drawing',
1 => 'Grayscale',
2 => 'Color',
},
},
# 0xc5d8 - found in CR2 images
# 0xc5d9 - found in CR2 images
0xc5e0 => { #forum8153 (CR2 images)
Name => 'CR2CFAPattern',
ValueConv => {
1 => '0 1 1 2',
2 => '2 1 1 0',
3 => '1 2 0 1',
4 => '1 0 2 1',
},
PrintConv => {
'0 1 1 2' => '[Red,Green][Green,Blue]',
'2 1 1 0' => '[Blue,Green][Green,Red]',
'1 2 0 1' => '[Green,Blue][Red,Green]',
'1 0 2 1' => '[Green,Red][Blue,Green]',
},
},
#
# DNG tags 0xc6XX, 0xc7XX and 0xcdXX (ref 2 unless otherwise stated)
#
0xc612 => {
Name => 'DNGVersion',
Notes => q{
tags 0xc612-0xcd3b are defined by the DNG specification unless otherwise
noted. See L<https://helpx.adobe.com/photoshop/digital-negative.html> for
the specification
},
Writable => 'int8u',
WriteGroup => 'IFD0',
Count => 4,
Protected => 1, # (confuses Apple Preview if written to a TIFF image)
DataMember => 'DNGVersion',
RawConv => '$$self{DNGVersion} = $val',
PrintConv => '$val =~ tr/ /./; $val',
PrintConvInv => '$val =~ tr/./ /; $val',
},
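    # (example: a stored DNGVersion of "1 4 0 0" prints as "1.4.0.0")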
0xc613 => {
Name => 'DNGBackwardVersion',
Writable => 'int8u',
WriteGroup => 'IFD0',
Count => 4,
Protected => 1,
PrintConv => '$val =~ tr/ /./; $val',
PrintConvInv => '$val =~ tr/./ /; $val',
},
0xc614 => {
Name => 'UniqueCameraModel',
Writable => 'string',
WriteGroup => 'IFD0',
},
0xc615 => {
Name => 'LocalizedCameraModel',
WriteGroup => 'IFD0',
%utf8StringConv,
PrintConv => '$self->Printable($val, 0)',
PrintConvInv => '$val',
},
0xc616 => {
Name => 'CFAPlaneColor',
WriteGroup => 'SubIFD', # (only for Validate)
PrintConv => q{
my @cols = qw(Red Green Blue Cyan Magenta Yellow White);
my @vals = map { $cols[$_] || "Unknown($_)" } split(' ', $val);
return join(',', @vals);
},
},
0xc617 => {
Name => 'CFALayout',
WriteGroup => 'SubIFD', # (only for Validate)
PrintConv => {
1 => 'Rectangular',
2 => 'Even columns offset down 1/2 row',
3 => 'Even columns offset up 1/2 row',
4 => 'Even rows offset right 1/2 column',
5 => 'Even rows offset left 1/2 column',
# the following are new for DNG 1.3:
6 => 'Even rows offset up by 1/2 row, even columns offset left by 1/2 column',
7 => 'Even rows offset up by 1/2 row, even columns offset right by 1/2 column',
8 => 'Even rows offset down by 1/2 row, even columns offset left by 1/2 column',
9 => 'Even rows offset down by 1/2 row, even columns offset right by 1/2 column',
},
},
0xc618 => {
Name => 'LinearizationTable',
Writable => 'int16u',
WriteGroup => 'SubIFD',
Count => -1,
Protected => 1,
Binary => 1,
},
0xc619 => {
Name => 'BlackLevelRepeatDim',
Writable => 'int16u',
WriteGroup => 'SubIFD',
Count => 2,
Protected => 1,
},
0xc61a => {
Name => 'BlackLevel',
Writable => 'rational64u',
WriteGroup => 'SubIFD',
Count => -1,
Protected => 1,
},
0xc61b => {
Name => 'BlackLevelDeltaH',
%longBin,
Writable => 'rational64s',
WriteGroup => 'SubIFD',
Count => -1,
Protected => 1,
},
0xc61c => {
Name => 'BlackLevelDeltaV',
%longBin,
Writable => 'rational64s',
WriteGroup => 'SubIFD',
Count => -1,
Protected => 1,
},
0xc61d => {
Name => 'WhiteLevel',
Writable => 'int32u',
WriteGroup => 'SubIFD',
Count => -1,
Protected => 1,
},
0xc61e => {
Name => 'DefaultScale',
Writable => 'rational64u',
WriteGroup => 'SubIFD',
Count => 2,
Protected => 1,
},
0xc61f => {
Name => 'DefaultCropOrigin',
Writable => 'int32u',
WriteGroup => 'SubIFD',
Count => 2,
Protected => 1,
},
0xc620 => {
Name => 'DefaultCropSize',
Writable => 'int32u',
WriteGroup => 'SubIFD',
Count => 2,
Protected => 1,
},
0xc621 => {
Name => 'ColorMatrix1',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc622 => {
Name => 'ColorMatrix2',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc623 => {
Name => 'CameraCalibration1',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc624 => {
Name => 'CameraCalibration2',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc625 => {
Name => 'ReductionMatrix1',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc626 => {
Name => 'ReductionMatrix2',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc627 => {
Name => 'AnalogBalance',
Writable => 'rational64u',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc628 => {
Name => 'AsShotNeutral',
Writable => 'rational64u',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc629 => {
Name => 'AsShotWhiteXY',
Writable => 'rational64u',
WriteGroup => 'IFD0',
Count => 2,
Protected => 1,
},
0xc62a => {
Name => 'BaselineExposure',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Protected => 1,
},
0xc62b => {
Name => 'BaselineNoise',
Writable => 'rational64u',
WriteGroup => 'IFD0',
Protected => 1,
},
0xc62c => {
Name => 'BaselineSharpness',
Writable => 'rational64u',
WriteGroup => 'IFD0',
Protected => 1,
},
0xc62d => {
Name => 'BayerGreenSplit',
Writable => 'int32u',
WriteGroup => 'SubIFD',
Protected => 1,
},
0xc62e => {
Name => 'LinearResponseLimit',
Writable => 'rational64u',
WriteGroup => 'IFD0',
Protected => 1,
},
0xc62f => {
Name => 'CameraSerialNumber',
Groups => { 2 => 'Camera' },
Writable => 'string',
WriteGroup => 'IFD0',
},
0xc630 => {
Name => 'DNGLensInfo',
Groups => { 2 => 'Camera' },
Writable => 'rational64u',
WriteGroup => 'IFD0',
Count => 4,
PrintConv =>\&PrintLensInfo,
PrintConvInv => \&ConvertLensInfo,
},
0xc631 => {
Name => 'ChromaBlurRadius',
Writable => 'rational64u',
WriteGroup => 'SubIFD',
Protected => 1,
},
0xc632 => {
Name => 'AntiAliasStrength',
Writable => 'rational64u',
WriteGroup => 'SubIFD',
Protected => 1,
},
0xc633 => {
Name => 'ShadowScale',
Writable => 'rational64u',
WriteGroup => 'IFD0',
Protected => 1,
},
0xc634 => [
{
Condition => '$$self{TIFF_TYPE} =~ /^(ARW|SR2)$/',
Name => 'SR2Private',
Groups => { 1 => 'SR2' },
Flags => 'SubIFD',
Format => 'int32u',
# some utilities have problems unless this is int8u format:
# - Adobe Camera Raw 5.3 gives an error
# - Apple Preview 10.5.8 gets the wrong white balance
FixFormat => 'int8u', # (stupid Sony)
WriteGroup => 'IFD0', # (for Validate)
SubDirectory => {
DirName => 'SR2Private',
TagTable => 'Image::ExifTool::Sony::SR2Private',
Start => '$val',
},
},
{
Condition => '$$valPt =~ /^Adobe\0/',
Name => 'DNGAdobeData',
Flags => [ 'Binary', 'Protected' ],
Writable => 'undef', # (writable directory!) (to make it possible to delete this mess)
WriteGroup => 'IFD0',
NestedHtmlDump => 1,
SubDirectory => { TagTable => 'Image::ExifTool::DNG::AdobeData' },
Format => 'undef', # but written as int8u (change to undef for speed)
},
{
# Pentax/Samsung models that write AOC maker notes in JPG images:
# K-5,K-7,K-m,K-x,K-r,K10D,K20D,K100D,K110D,K200D,K2000,GX10,GX20
# (Note: the following expression also appears in WriteExif.pl)
Condition => q{
$$valPt =~ /^(PENTAX |SAMSUNG)\0/ and
$$self{Model} =~ /\b(K(-[57mrx]|(10|20|100|110|200)D|2000)|GX(10|20))\b/
},
Name => 'MakerNotePentax',
MakerNotes => 1, # (causes "MakerNotes header" to be identified in HtmlDump output)
Binary => 1,
WriteGroup => 'IFD0', # (for Validate)
# Note: Don't make this block-writable for a few reasons:
# 1) It would be dangerous (possibly confusing Pentax software)
# 2) It is a different format from the JPEG version of MakerNotePentax
# 3) It is converted to JPEG format by RebuildMakerNotes() when copying
SubDirectory => {
TagTable => 'Image::ExifTool::Pentax::Main',
Start => '$valuePtr + 10',
Base => '$start - 10',
ByteOrder => 'Unknown', # easier to do this than read byteorder word
},
Format => 'undef', # but written as int8u (change to undef for speed)
},
{
# must duplicate the above tag with a different name for more recent
# Pentax models which use the "PENTAX" instead of the "AOC" maker notes
# in JPG images (needed when copying maker notes from DNG to JPG)
Condition => '$$valPt =~ /^(PENTAX |SAMSUNG)\0/',
Name => 'MakerNotePentax5',
MakerNotes => 1,
Binary => 1,
WriteGroup => 'IFD0', # (for Validate)
SubDirectory => {
TagTable => 'Image::ExifTool::Pentax::Main',
Start => '$valuePtr + 10',
Base => '$start - 10',
ByteOrder => 'Unknown',
},
Format => 'undef',
},
{
# Ricoh models such as the GR III
Condition => '$$valPt =~ /^RICOH\0(II|MM)/',
Name => 'MakerNoteRicohPentax',
MakerNotes => 1,
Binary => 1,
WriteGroup => 'IFD0', # (for Validate)
SubDirectory => {
TagTable => 'Image::ExifTool::Pentax::Main',
Start => '$valuePtr + 8',
Base => '$start - 8',
ByteOrder => 'Unknown',
},
Format => 'undef',
},
# the DJI FC2103 writes some interesting stuff here (with sections labelled
# awb_dbg_info, ae_dbg_info, ae_histogram_info, af_dbg_info, hiso, xidiri) - PH
{
Name => 'DNGPrivateData',
Flags => [ 'Binary', 'Protected' ],
Format => 'undef',
Writable => 'int8u',
WriteGroup => 'IFD0',
},
],
0xc635 => {
Name => 'MakerNoteSafety',
Writable => 'int16u',
WriteGroup => 'IFD0',
PrintConv => {
0 => 'Unsafe',
1 => 'Safe',
},
},
0xc640 => { #15
Name => 'RawImageSegmentation',
# (int16u[3], not writable)
Notes => q{
used in segmented Canon CR2 images. 3 numbers: 1. Number of segments minus
one; 2. Pixel width of segments except last; 3. Pixel width of last segment
},
},
0xc65a => {
Name => 'CalibrationIlluminant1',
Writable => 'int16u',
WriteGroup => 'IFD0',
Protected => 1,
SeparateTable => 'LightSource',
PrintConv => \%lightSource,
},
0xc65b => {
Name => 'CalibrationIlluminant2',
Writable => 'int16u',
WriteGroup => 'IFD0',
Protected => 1,
SeparateTable => 'LightSource',
PrintConv => \%lightSource,
},
0xc65c => {
Name => 'BestQualityScale',
Writable => 'rational64u',
WriteGroup => 'SubIFD',
Protected => 1,
},
0xc65d => {
Name => 'RawDataUniqueID',
Format => 'undef',
Writable => 'int8u',
WriteGroup => 'IFD0',
Count => 16,
Protected => 1,
ValueConv => 'uc(unpack("H*",$val))',
ValueConvInv => 'pack("H*", $val)',
},
0xc660 => { #3
Name => 'AliasLayerMetadata',
Notes => 'used by Alias Sketchbook Pro',
},
0xc68b => {
Name => 'OriginalRawFileName',
WriteGroup => 'IFD0',
Protected => 1,
%utf8StringConv,
},
0xc68c => {
Name => 'OriginalRawFileData', # (writable directory!)
Writable => 'undef', # must be defined here so tag will be extracted if specified
WriteGroup => 'IFD0',
Flags => [ 'Binary', 'Protected' ],
SubDirectory => {
TagTable => 'Image::ExifTool::DNG::OriginalRaw',
},
},
0xc68d => {
Name => 'ActiveArea',
Writable => 'int32u',
WriteGroup => 'SubIFD',
Count => 4,
Protected => 1,
},
0xc68e => {
Name => 'MaskedAreas',
Writable => 'int32u',
WriteGroup => 'SubIFD',
Count => -1,
Protected => 1,
},
0xc68f => {
Name => 'AsShotICCProfile', # (writable directory)
Binary => 1,
Writable => 'undef', # must be defined here so tag will be extracted if specified
WriteGroup => 'IFD0',
Protected => 1,
WriteCheck => q{
require Image::ExifTool::ICC_Profile;
return Image::ExifTool::ICC_Profile::ValidateICC(\$val);
},
SubDirectory => {
DirName => 'AsShotICCProfile',
TagTable => 'Image::ExifTool::ICC_Profile::Main',
},
},
0xc690 => {
Name => 'AsShotPreProfileMatrix',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc691 => {
Name => 'CurrentICCProfile', # (writable directory)
Binary => 1,
Writable => 'undef', # must be defined here so tag will be extracted if specified
SubDirectory => {
DirName => 'CurrentICCProfile',
TagTable => 'Image::ExifTool::ICC_Profile::Main',
},
WriteGroup => 'IFD0',
Protected => 1,
WriteCheck => q{
require Image::ExifTool::ICC_Profile;
return Image::ExifTool::ICC_Profile::ValidateICC(\$val);
},
},
0xc692 => {
Name => 'CurrentPreProfileMatrix',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc6bf => {
Name => 'ColorimetricReference',
Writable => 'int16u',
WriteGroup => 'IFD0',
Protected => 1,
},
0xc6c5 => { Name => 'SRawType', Description => 'SRaw Type', WriteGroup => 'IFD0' }, #exifprobe (CR2 proprietary)
0xc6d2 => { #JD (Panasonic DMC-TZ5)
# this text is UTF-8 encoded (hooray!) - PH (TZ5)
Name => 'PanasonicTitle',
Format => 'string', # written incorrectly as 'undef'
Notes => 'proprietary Panasonic tag used for baby/pet name, etc',
Writable => 'undef',
WriteGroup => 'IFD0',
# panasonic always records this tag (64 zero bytes),
# so ignore it unless it contains valid information
RawConv => 'length($val) ? $val : undef',
ValueConv => '$self->Decode($val, "UTF8")',
ValueConvInv => '$self->Encode($val,"UTF8")',
},
0xc6d3 => { #PH (Panasonic DMC-FS7)
Name => 'PanasonicTitle2',
Format => 'string', # written incorrectly as 'undef'
Notes => 'proprietary Panasonic tag used for baby/pet name with age',
Writable => 'undef',
WriteGroup => 'IFD0',
# panasonic always records this tag (128 zero bytes),
# so ignore it unless it contains valid information
RawConv => 'length($val) ? $val : undef',
ValueConv => '$self->Decode($val, "UTF8")',
ValueConvInv => '$self->Encode($val,"UTF8")',
},
# 0xc6dc - int32u[4]: found in CR2 images (PH, 7DmkIII)
# 0xc6dd - int16u[256]: found in CR2 images (PH, 5DmkIV)
0xc6f3 => {
Name => 'CameraCalibrationSig',
WriteGroup => 'IFD0',
Protected => 1,
%utf8StringConv,
},
0xc6f4 => {
Name => 'ProfileCalibrationSig',
WriteGroup => 'IFD0',
Protected => 1,
%utf8StringConv,
},
0xc6f5 => {
Name => 'ProfileIFD', # (ExtraCameraProfiles)
Groups => { 1 => 'ProfileIFD' },
Flags => 'SubIFD',
WriteGroup => 'IFD0', # (only for Validate)
SubDirectory => {
ProcessProc => \&ProcessTiffIFD,
WriteProc => \&ProcessTiffIFD,
DirName => 'ProfileIFD',
Start => '$val',
Base => '$start', # offsets relative to start of TIFF-like header
MaxSubdirs => 10,
Magic => 0x4352, # magic number for TIFF-like header
},
},
0xc6f6 => {
Name => 'AsShotProfileName',
WriteGroup => 'IFD0',
Protected => 1,
%utf8StringConv,
},
0xc6f7 => {
Name => 'NoiseReductionApplied',
Writable => 'rational64u',
WriteGroup => 'SubIFD',
Protected => 1,
},
0xc6f8 => {
Name => 'ProfileName',
WriteGroup => 'IFD0',
Protected => 1,
%utf8StringConv,
},
0xc6f9 => {
Name => 'ProfileHueSatMapDims',
Writable => 'int32u',
WriteGroup => 'IFD0',
Count => 3,
Protected => 1,
},
0xc6fa => {
Name => 'ProfileHueSatMapData1',
%longBin,
Writable => 'float',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc6fb => {
Name => 'ProfileHueSatMapData2',
%longBin,
Writable => 'float',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc6fc => {
Name => 'ProfileToneCurve',
%longBin,
Writable => 'float',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc6fd => {
Name => 'ProfileEmbedPolicy',
Writable => 'int32u',
WriteGroup => 'IFD0',
Protected => 1,
PrintConv => {
0 => 'Allow Copying',
1 => 'Embed if Used',
2 => 'Never Embed',
3 => 'No Restrictions',
},
},
0xc6fe => {
Name => 'ProfileCopyright',
WriteGroup => 'IFD0',
Protected => 1,
%utf8StringConv,
},
0xc714 => {
Name => 'ForwardMatrix1',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc715 => {
Name => 'ForwardMatrix2',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc716 => {
Name => 'PreviewApplicationName',
WriteGroup => 'IFD0',
Protected => 1,
%utf8StringConv,
},
0xc717 => {
Name => 'PreviewApplicationVersion',
Writable => 'string',
WriteGroup => 'IFD0',
Protected => 1,
%utf8StringConv,
},
0xc718 => {
Name => 'PreviewSettingsName',
Writable => 'string',
WriteGroup => 'IFD0',
Protected => 1,
%utf8StringConv,
},
0xc719 => {
Name => 'PreviewSettingsDigest',
Format => 'undef',
Writable => 'int8u',
WriteGroup => 'IFD0',
Protected => 1,
ValueConv => 'unpack("H*", $val)',
ValueConvInv => 'pack("H*", $val)',
},
0xc71a => {
Name => 'PreviewColorSpace',
Writable => 'int32u',
WriteGroup => 'IFD0',
Protected => 1,
PrintConv => {
0 => 'Unknown',
1 => 'Gray Gamma 2.2',
2 => 'sRGB',
3 => 'Adobe RGB',
4 => 'ProPhoto RGB',
},
},
0xc71b => {
Name => 'PreviewDateTime',
Groups => { 2 => 'Time' },
Writable => 'string',
Shift => 'Time',
WriteGroup => 'IFD0',
Protected => 1,
ValueConv => q{
require Image::ExifTool::XMP;
return Image::ExifTool::XMP::ConvertXMPDate($val);
},
ValueConvInv => q{
require Image::ExifTool::XMP;
return Image::ExifTool::XMP::FormatXMPDate($val);
},
PrintConv => '$self->ConvertDateTime($val)',
PrintConvInv => '$self->InverseDateTime($val,1,1)',
},
0xc71c => {
Name => 'RawImageDigest',
Format => 'undef',
Writable => 'int8u',
WriteGroup => 'IFD0',
Count => 16,
Protected => 1,
ValueConv => 'unpack("H*", $val)',
ValueConvInv => 'pack("H*", $val)',
},
0xc71d => {
Name => 'OriginalRawFileDigest',
Format => 'undef',
Writable => 'int8u',
WriteGroup => 'IFD0',
Count => 16,
Protected => 1,
ValueConv => 'unpack("H*", $val)',
ValueConvInv => 'pack("H*", $val)',
},
0xc71e => 'SubTileBlockSize',
0xc71f => 'RowInterleaveFactor',
0xc725 => {
Name => 'ProfileLookTableDims',
Writable => 'int32u',
WriteGroup => 'IFD0',
Count => 3,
Protected => 1,
},
0xc726 => {
Name => 'ProfileLookTableData',
%longBin,
Writable => 'float',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xc740 => { Name => 'OpcodeList1', %opcodeInfo }, # DNG 1.3
0xc741 => { Name => 'OpcodeList2', %opcodeInfo }, # DNG 1.3
0xc74e => { Name => 'OpcodeList3', %opcodeInfo }, # DNG 1.3
0xc761 => { # DNG 1.3
Name => 'NoiseProfile',
Writable => 'double',
WriteGroup => 'SubIFD',
Count => -1,
Protected => 1,
},
0xc763 => { #28
Name => 'TimeCodes',
Writable => 'int8u',
WriteGroup => 'IFD0',
Count => -1, # (8 * number of time codes, max 10)
ValueConv => q{
my @a = split ' ', $val;
my @v;
push @v, join('.', map { sprintf('%.2x',$_) } splice(@a,0,8)) while @a >= 8;
join ' ', @v;
},
ValueConvInv => q{
my @a = map hex, split /[. ]+/, $val;
join ' ', @a;
},
# Note: Currently ignore the flags:
# byte 0 0x80 - color frame
# byte 0 0x40 - drop frame
# byte 1 0x80 - field phase
PrintConv => q{
my @a = map hex, split /[. ]+/, $val;
my @v;
while (@a >= 8) {
my $str = sprintf("%.2x:%.2x:%.2x.%.2x", $a[3]&0x3f,
$a[2]&0x7f, $a[1]&0x7f, $a[0]&0x3f);
if ($a[3] & 0x80) { # date+timezone exist if BGF2 is set
my $tz = $a[7] & 0x3f;
my $bz = sprintf('%.2x', $tz);
$bz = 100 if $bz =~ /[a-f]/i; # not BCD
if ($bz < 26) {
$tz = ($bz < 13 ? 0 : 26) - $bz;
} elsif ($bz == 32) {
$tz = 12.75;
} elsif ($bz >= 28 and $bz <= 31) {
$tz = 0; # UTC
} elsif ($bz < 100) {
undef $tz; # undefined or user-defined
} elsif ($tz < 0x20) {
$tz = (($tz < 0x10 ? 10 : 20) - $tz) - 0.5;
} else {
$tz = (($tz < 0x30 ? 53 : 63) - $tz) + 0.5;
}
if ($a[7] & 0x80) { # MJD format (/w UTC time)
my ($h,$m,$s,$f) = split /[:.]/, $str;
my $jday = sprintf('%x%.2x%.2x', reverse @a[4..6]);
$str = ConvertUnixTime(($jday - 40587) * 24 * 3600
+ ((($h+$tz) * 60) + $m) * 60 + $s) . ".$f";
$str =~ s/^(\d+):(\d+):(\d+) /$1-$2-${3}T/;
} else { # YYMMDD (Note: CinemaDNG 1.1 example seems wrong)
my $yr = sprintf('%.2x',$a[6]) + 1900;
$yr += 100 if $yr < 1970;
$str = sprintf('%d-%.2x-%.2xT%s',$yr,$a[5],$a[4],$str);
}
$str .= TimeZoneString($tz*60) if defined $tz;
}
push @v, $str;
splice @a, 0, 8;
}
join ' ', @v;
},
PrintConvInv => q{
my @a = split ' ', $val;
my @v;
foreach (@a) {
my @td = reverse split /T/;
my $tz = 0x39; # default to unknown timezone
if ($td[0] =~ s/([-+])(\d+):(\d+)$//) {
if ($3 == 0) {
$tz = hex(($1 eq '-') ? $2 : 0x26 - $2);
} elsif ($3 == 30) {
if ($1 eq '-') {
$tz = $2 + 0x0a;
$tz += 0x0a if $tz > 0x0f;
} else {
$tz = 0x3f - $2;
$tz -= 0x0a if $tz < 0x3a;
}
} elsif ($3 == 45) {
$tz = 0x32 if $1 eq '+' and $2 == 12;
}
}
my @t = split /[:.]/, $td[0];
push @t, '00' while @t < 4;
my $bg;
if ($td[1]) {
# date was specified: fill in date & timezone
my @d = split /[-]/, $td[1];
next if @d < 3;
$bg = sprintf('.%.2d.%.2d.%.2d.%.2x', $d[2], $d[1], $d[0]%100, $tz);
$t[0] = sprintf('%.2x', hex($t[0]) + 0xc0); # set BGF1+BGF2
} else { # time only
$bg = '.00.00.00.00';
}
push @v, join('.', reverse(@t[0..3])) . $bg;
}
join ' ', @v;
},
},
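    # (worked example of the decoding above: a time code stored as
    #  "10.30.45.12.00.00.00.00" -- BCD frames, seconds, minutes, hours, then four
    #  bytes for the optional date and time zone -- prints as "12:45:30.10")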
0xc764 => { #28
Name => 'FrameRate',
Writable => 'rational64s',
WriteGroup => 'IFD0',
PrintConv => 'int($val * 1000 + 0.5) / 1000',
PrintConvInv => '$val',
},
0xc772 => { #28
Name => 'TStop',
Writable => 'rational64u',
WriteGroup => 'IFD0',
Count => -1, # (1 or 2)
PrintConv => 'join("-", map { sprintf("%.2f",$_) } split " ", $val)',
PrintConvInv => '$val=~tr/-/ /; $val',
},
0xc789 => { #28
Name => 'ReelName',
Writable => 'string',
WriteGroup => 'IFD0',
},
0xc791 => { # DNG 1.4
Name => 'OriginalDefaultFinalSize',
Writable => 'int32u',
WriteGroup => 'IFD0',
Count => 2,
Protected => 1,
},
0xc792 => { # DNG 1.4
Name => 'OriginalBestQualitySize',
Notes => 'called OriginalBestQualityFinalSize by the DNG spec',
Writable => 'int32u',
WriteGroup => 'IFD0',
Count => 2,
Protected => 1,
},
0xc793 => { # DNG 1.4
Name => 'OriginalDefaultCropSize',
Writable => 'rational64u',
WriteGroup => 'IFD0',
Count => 2,
Protected => 1,
},
0xc7a1 => { #28
Name => 'CameraLabel',
Writable => 'string',
WriteGroup => 'IFD0',
},
0xc7a3 => { # DNG 1.4
Name => 'ProfileHueSatMapEncoding',
Writable => 'int32u',
WriteGroup => 'IFD0',
Protected => 1,
PrintConv => {
0 => 'Linear',
1 => 'sRGB',
},
},
0xc7a4 => { # DNG 1.4
Name => 'ProfileLookTableEncoding',
Writable => 'int32u',
WriteGroup => 'IFD0',
Protected => 1,
PrintConv => {
0 => 'Linear',
1 => 'sRGB',
},
},
0xc7a5 => { # DNG 1.4
Name => 'BaselineExposureOffset',
Writable => 'rational64s', # (incorrectly "RATIONAL" in DNG 1.4 spec)
WriteGroup => 'IFD0',
Protected => 1,
},
0xc7a6 => { # DNG 1.4
Name => 'DefaultBlackRender',
Writable => 'int32u',
WriteGroup => 'IFD0',
Protected => 1,
PrintConv => {
0 => 'Auto',
1 => 'None',
},
},
0xc7a7 => { # DNG 1.4
Name => 'NewRawImageDigest',
Format => 'undef',
Writable => 'int8u',
WriteGroup => 'IFD0',
Count => 16,
Protected => 1,
ValueConv => 'unpack("H*", $val)',
ValueConvInv => 'pack("H*", $val)',
},
0xc7a8 => { # DNG 1.4
Name => 'RawToPreviewGain',
Writable => 'double',
WriteGroup => 'IFD0',
Protected => 1,
},
# 0xc7a9 - CacheBlob (ref 31)
0xc7aa => { #31 undocumented DNG tag written by LR4 (val=256, related to fast load data?)
Name => 'CacheVersion',
Writable => 'int32u',
WriteGroup => 'SubIFD2',
Format => 'int8u',
Count => 4,
Protected => 1,
PrintConv => '$val =~ tr/ /./; $val',
PrintConvInv => '$val =~ tr/./ /; $val',
},
0xc7b5 => { # DNG 1.4
Name => 'DefaultUserCrop',
Writable => 'rational64u',
WriteGroup => 'SubIFD',
Count => 4,
Protected => 1,
},
0xc7d5 => { #PH (in SubIFD1 of Nikon Z6/Z7 NEF images)
Name => 'NikonNEFInfo',
Condition => '$$valPt =~ /^Nikon\0/',
SubDirectory => {
TagTable => 'Image::ExifTool::Nikon::NEFInfo',
Start => '$valuePtr + 18',
Base => '$start - 8',
ByteOrder => 'Unknown',
},
},
# 0xc7d6 - int8u: 1 (SubIFD1 of Nikon Z6/Z7 NEF)
0xc7e9 => { # DNG 1.5
Name => 'DepthFormat',
Writable => 'int16u',
Notes => 'tags 0xc7e9-0xc7ee added by DNG 1.5.0.0',
Protected => 1,
WriteGroup => 'IFD0',
PrintConv => {
0 => 'Unknown',
1 => 'Linear',
2 => 'Inverse',
},
},
0xc7ea => { # DNG 1.5
Name => 'DepthNear',
Writable => 'rational64u',
Protected => 1,
WriteGroup => 'IFD0',
},
0xc7eb => { # DNG 1.5
Name => 'DepthFar',
Writable => 'rational64u',
Protected => 1,
WriteGroup => 'IFD0',
},
0xc7ec => { # DNG 1.5
Name => 'DepthUnits',
Writable => 'int16u',
Protected => 1,
WriteGroup => 'IFD0',
PrintConv => {
0 => 'Unknown',
1 => 'Meters',
},
},
0xc7ed => { # DNG 1.5
Name => 'DepthMeasureType',
Writable => 'int16u',
Protected => 1,
WriteGroup => 'IFD0',
PrintConv => {
0 => 'Unknown',
1 => 'Optical Axis',
2 => 'Optical Ray',
},
},
0xc7ee => { # DNG 1.5
Name => 'EnhanceParams',
Writable => 'string',
Protected => 1,
WriteGroup => 'IFD0',
},
0xcd2d => { # DNG 1.6
Name => 'ProfileGainTableMap',
Writable => 'undef',
WriteGroup => 'SubIFD',
Protected => 1,
Binary => 1,
},
0xcd2e => { # DNG 1.6
Name => 'SemanticName',
# Writable => 'string',
WriteGroup => 'SubIFD' #? (NC) Semantic Mask IFD (only for Validate)
},
0xcd30 => { # DNG 1.6
Name => 'SemanticInstanceIFD',
# Writable => 'string',
WriteGroup => 'SubIFD' #? (NC) Semantic Mask IFD (only for Validate)
},
0xcd31 => { # DNG 1.6
Name => 'CalibrationIlluminant3',
Writable => 'int16u',
WriteGroup => 'IFD0',
Protected => 1,
SeparateTable => 'LightSource',
PrintConv => \%lightSource,
},
0xcd32 => { # DNG 1.6
Name => 'CameraCalibration3',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xcd33 => { # DNG 1.6
Name => 'ColorMatrix3',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xcd34 => { # DNG 1.6
Name => 'ForwardMatrix3',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xcd35 => { # DNG 1.6
Name => 'IlluminantData1',
Writable => 'undef',
WriteGroup => 'IFD0',
Protected => 1,
},
0xcd36 => { # DNG 1.6
Name => 'IlluminantData2',
Writable => 'undef',
WriteGroup => 'IFD0',
Protected => 1,
},
0xcd37 => { # DNG 1.6
Name => 'IlluminantData3',
Writable => 'undef',
WriteGroup => 'IFD0',
Protected => 1,
},
0xcd38 => { # DNG 1.6
Name => 'MaskSubArea',
# Writable => 'int32u',
WriteGroup => 'SubIFD', #? (NC) Semantic Mask IFD (only for Validate)
Count => 4,
},
0xcd39 => { # DNG 1.6
Name => 'ProfileHueSatMapData3',
%longBin,
Writable => 'float',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xcd3a => { # DNG 1.6
Name => 'ReductionMatrix3',
Writable => 'rational64s',
WriteGroup => 'IFD0',
Count => -1,
Protected => 1,
},
0xcd3b => { # DNG 1.6
Name => 'RGBTables',
Writable => 'undef',
WriteGroup => 'IFD0',
Protected => 1,
},
0xea1c => { #13
Name => 'Padding',
Binary => 1,
Protected => 1,
Writable => 'undef',
# must start with 0x1c 0xea by the WM Photo specification
# (not sure what should happen if padding is only 1 byte)
# (why does MicrosoftPhoto write "1c ea 00 00 00 08"?)
RawConvInv => '$val=~s/^../\x1c\xea/s; $val',
},
0xea1d => {
Name => 'OffsetSchema',
Notes => "Microsoft's ill-conceived maker note offset difference",
Protected => 1,
Writable => 'int32s',
# From the Microsoft documentation:
#
# Any time the "Maker Note" is relocated by Windows, the Exif MakerNote
# tag (37500) is updated automatically to reference the new location. In
# addition, Windows records the offset (or difference) between the old and
# new locations in the Exif OffsetSchema tag (59933). If the "Maker Note"
# contains relative references, the developer can add the value in
# OffsetSchema to the original references to find the correct information.
#
# My recommendation is for other developers to ignore this tag because the
# information it contains is unreliable. It will be wrong if the image has
# been subsequently edited by another application that doesn't recognize the
# new Microsoft tag.
#
# The new tag unfortunately only gives the difference between the new maker
# note offset and the original offset. Instead, it should have been designed
# to store the original offset. The new offset may change if the image is
# edited, which will invalidate the tag as currently written. If instead the
# original offset had been stored, the new difference could be easily
# calculated because the new maker note offset is known.
#
# I exchanged emails with a Microsoft technical representative, pointing out
# this problem shortly after they released the update (Feb 2007), but so far
# they have taken no steps to address this.
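        #
        # Example: if the maker notes originally started at file offset 0x1000 and
        # Windows moved them to 0x1900, OffsetSchema would be written as 0x900
        # (2304), the amount to add to the original relative references.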
},
# 0xefee - int16u: 0 - seen this from a WIC-scanned image
# tags in the range 0xfde8-0xfe58 have been observed in PS7 files
# generated from RAW images. They are all strings with the
# tag name at the start of the string. To accommodate these types
# of tags, all tags with values above 0xf000 are handled specially
# by ProcessExif().
0xfde8 => {
Name => 'OwnerName',
Condition => '$$self{TIFF_TYPE} ne "DCR"', # (used for another purpose in Kodak DCR images)
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"Owner's Name: $val"},
Notes => q{
tags 0xfde8-0xfdea and 0xfe4c-0xfe58 are generated by Photoshop Camera RAW.
Some names are the same as other EXIF tags, but ExifTool will avoid writing
these unless they already exist in the file
},
},
0xfde9 => {
Name => 'SerialNumber',
Condition => '$$self{TIFF_TYPE} ne "DCR"', # (used for another purpose in Kodak DCR SubIFD)
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"Serial Number: $val"},
},
0xfdea => {
Name => 'Lens',
Condition => '$$self{TIFF_TYPE} ne "DCR"', # (used for another purpose in Kodak DCR SubIFD)
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"Lens: $val"},
},
0xfe4c => {
Name => 'RawFile',
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"Raw File: $val"},
},
0xfe4d => {
Name => 'Converter',
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"Converter: $val"},
},
0xfe4e => {
Name => 'WhiteBalance',
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"White Balance: $val"},
},
0xfe51 => {
Name => 'Exposure',
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"Exposure: $val"},
},
0xfe52 => {
Name => 'Shadows',
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"Shadows: $val"},
},
0xfe53 => {
Name => 'Brightness',
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"Brightness: $val"},
},
0xfe54 => {
Name => 'Contrast',
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"Contrast: $val"},
},
0xfe55 => {
Name => 'Saturation',
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"Saturation: $val"},
},
0xfe56 => {
Name => 'Sharpness',
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"Sharpness: $val"},
},
0xfe57 => {
Name => 'Smoothness',
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"Smoothness: $val"},
},
0xfe58 => {
Name => 'MoireFilter',
Avoid => 1,
PSRaw => 1,
Writable => 'string',
ValueConv => '$val=~s/^.*: //;$val',
ValueConvInv => q{"Moire Filter: $val"},
},
#-------------
0xfe00 => {
Name => 'KDC_IFD',
Groups => { 1 => 'KDC_IFD' },
Flags => 'SubIFD',
Notes => 'used in some Kodak KDC images',
SubDirectory => {
TagTable => 'Image::ExifTool::Kodak::KDC_IFD',
DirName => 'KDC_IFD',
Start => '$val',
},
},
);
# conversions for Composite SubSec date/time tags
my %subSecConv = (
# @val array: 0) date/time, 1) sub-seconds, 2) time zone offset
RawConv => q{
my $v;
if (defined $val[1] and $val[1]=~/^(\d+)/) {
my $subSec = $1;
# be careful here just in case the time already contains sub-seconds or a timezone (contrary to spec)
undef $v unless ($v = $val[0]) =~ s/( \d{2}:\d{2}:\d{2})(?!\.\d+)/$1\.$subSec/;
}
if (defined $val[2] and $val[0]!~/[-+]/ and $val[2]=~/^([-+])(\d{1,2}):(\d{2})/) {
$v = ($v || $val[0]) . sprintf('%s%.2d:%.2d', $1, $2, $3);
}
return $v;
},
PrintConv => '$self->ConvertDateTime($val)',
PrintConvInv => '$self->InverseDateTime($val)',
);
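# (example of the conversion above: a date/time of '2024:01:02 03:04:05' with
#  sub-seconds '123' and a time zone offset of '+05:00' combines to give
#  '2024:01:02 03:04:05.123+05:00')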
# EXIF Composite tags (plus other more general Composite tags)
%Image::ExifTool::Exif::Composite = (
GROUPS => { 2 => 'Image' },
ImageSize => {
Require => {
0 => 'ImageWidth',
1 => 'ImageHeight',
},
Desire => {
2 => 'ExifImageWidth',
3 => 'ExifImageHeight',
4 => 'RawImageCroppedSize', # (FujiFilm RAF images)
},
        # use ExifImageWidth/Height only for Canon and Phase One TIFF-based RAW images
ValueConv => q{
return $val[4] if $val[4];
return "$val[2] $val[3]" if $val[2] and $val[3] and
$$self{TIFF_TYPE} =~ /^(CR2|Canon 1D RAW|IIQ|EIP)$/;
return "$val[0] $val[1]" if IsFloat($val[0]) and IsFloat($val[1]);
return undef;
},
PrintConv => '$val =~ tr/ /x/; $val',
},
Megapixels => {
Require => 'ImageSize',
ValueConv => 'my @d = ($val =~ /\d+/g); $d[0] * $d[1] / 1000000',
PrintConv => 'sprintf("%.*f", ($val >= 1 ? 1 : ($val >= 0.001 ? 3 : 6)), $val)',
},
# pick the best shutter speed value
ShutterSpeed => {
Desire => {
0 => 'ExposureTime',
1 => 'ShutterSpeedValue',
2 => 'BulbDuration',
},
ValueConv => '($val[2] and $val[2]>0) ? $val[2] : (defined($val[0]) ? $val[0] : $val[1])',
PrintConv => 'Image::ExifTool::Exif::PrintExposureTime($val)',
},
Aperture => {
Desire => {
0 => 'FNumber',
1 => 'ApertureValue',
},
RawConv => '($val[0] || $val[1]) ? $val : undef',
ValueConv => '$val[0] || $val[1]',
PrintConv => 'Image::ExifTool::Exif::PrintFNumber($val)',
},
LightValue => {
Notes => q{
calculated LV = 2 * log2(Aperture) - log2(ShutterSpeed) - log2(ISO/100);
similar to exposure value but normalized to ISO 100
},
Require => {
0 => 'Aperture',
1 => 'ShutterSpeed',
2 => 'ISO',
},
ValueConv => 'Image::ExifTool::Exif::CalculateLV($val[0],$val[1],$prt[2])',
PrintConv => 'sprintf("%.1f",$val)',
},
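    # (worked example of the formula above: f/2.8 at 1/30 sec and ISO 100 gives
    #  LV = 2*log2(2.8) - log2(1/30) - log2(1) = 2.97 + 4.91 - 0, or about 7.9)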
FocalLength35efl => { #26/PH
Description => 'Focal Length',
Notes => 'this value may be incorrect if the image has been resized',
Groups => { 2 => 'Camera' },
Require => {
0 => 'FocalLength',
},
Desire => {
1 => 'ScaleFactor35efl',
},
ValueConv => 'ToFloat(@val); ($val[0] || 0) * ($val[1] || 1)',
PrintConv => '$val[1] ? sprintf("%.1f mm (35 mm equivalent: %.1f mm)", $val[0], $val) : sprintf("%.1f mm", $val)',
},
ScaleFactor35efl => { #26/PH
Description => 'Scale Factor To 35 mm Equivalent',
Notes => q{
this value and any derived values may be incorrect if the image has been
resized
},
Groups => { 2 => 'Camera' },
Desire => {
0 => 'FocalLength',
1 => 'FocalLengthIn35mmFormat',
2 => 'Composite:DigitalZoom',
3 => 'FocalPlaneDiagonal',
4 => 'SensorSize',
5 => 'FocalPlaneXSize',
6 => 'FocalPlaneYSize',
7 => 'FocalPlaneResolutionUnit',
8 => 'FocalPlaneXResolution',
9 => 'FocalPlaneYResolution',
10 => 'ExifImageWidth',
11 => 'ExifImageHeight',
12 => 'CanonImageWidth',
13 => 'CanonImageHeight',
14 => 'ImageWidth',
15 => 'ImageHeight',
},
ValueConv => 'Image::ExifTool::Exif::CalcScaleFactor35efl($self, @val)',
PrintConv => 'sprintf("%.1f", $val)',
},
CircleOfConfusion => {
Notes => q{
calculated as D/1440, where D is the focal plane diagonal in mm. This value
may be incorrect if the image has been resized
},
Groups => { 2 => 'Camera' },
Require => 'ScaleFactor35efl',
ValueConv => 'sqrt(24*24+36*36) / ($val * 1440)',
PrintConv => 'sprintf("%.3f mm",$val)',
},
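    # (CircleOfConfusion worked example, for illustration only: a full-frame camera with
    # ScaleFactor35efl = 1.0 gives sqrt(1872)/1440 = 0.030 mm; a 1.5x crop gives 0.020 mm)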
HyperfocalDistance => {
Notes => 'this value may be incorrect if the image has been resized',
Groups => { 2 => 'Camera' },
Require => {
0 => 'FocalLength',
1 => 'Aperture',
2 => 'CircleOfConfusion',
},
ValueConv => q{
ToFloat(@val);
return 'inf' unless $val[1] and $val[2];
return $val[0] * $val[0] / ($val[1] * $val[2] * 1000);
},
PrintConv => 'sprintf("%.2f m", $val)',
},
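    # (HyperfocalDistance worked example, for illustration only: FocalLength 50 mm,
    # Aperture f/2.8 and CircleOfConfusion 0.030 mm give 50*50/(2.8*0.030*1000) = 29.8 m)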
DOF => {
Description => 'Depth Of Field',
Notes => 'this value may be incorrect if the image has been resized',
Require => {
0 => 'FocalLength',
1 => 'Aperture',
2 => 'CircleOfConfusion',
},
Desire => {
3 => 'FocusDistance', # focus distance in metres (0 is infinity)
4 => 'SubjectDistance',
5 => 'ObjectDistance',
6 => 'ApproximateFocusDistance',
7 => 'FocusDistanceLower',
8 => 'FocusDistanceUpper',
},
ValueConv => q{
ToFloat(@val);
my ($d, $f) = ($val[3], $val[0]);
if (defined $d) {
$d or $d = 1e10; # (use large number for infinity)
} else {
$d = $val[4] || $val[5] || $val[6];
unless (defined $d) {
return undef unless defined $val[7] and defined $val[8];
$d = ($val[7] + $val[8]) / 2;
}
}
return 0 unless $f and $val[2];
my $t = $val[1] * $val[2] * ($d * 1000 - $f) / ($f * $f);
my @v = ($d / (1 + $t), $d / (1 - $t));
$v[1] < 0 and $v[1] = 0; # 0 means 'inf'
return join(' ',@v);
},
PrintConv => q{
$val =~ tr/,/./; # in case locale is whacky
my @v = split ' ', $val;
$v[1] or return sprintf("inf (%.2f m - inf)", $v[0]);
my $dof = $v[1] - $v[0];
my $fmt = ($dof>0 and $dof<0.02) ? "%.3f" : "%.2f";
return sprintf("$fmt m ($fmt - $fmt m)",$dof,$v[0],$v[1]);
},
},
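    # (DOF worked example, for illustration only: FocalLength 50 mm, Aperture f/2.8,
    # CircleOfConfusion 0.030 mm and FocusDistance 5 m give limits of about 4.29 and
    # 6.00 m, ie. a depth of field of roughly 1.71 m)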
FOV => {
Description => 'Field Of View',
Notes => q{
calculated for the long image dimension. This value may be incorrect for
fisheye lenses, or if the image has been resized
},
Require => {
0 => 'FocalLength',
1 => 'ScaleFactor35efl',
},
Desire => {
2 => 'FocusDistance', # (multiply by 1000 to convert to mm)
},
# ref http://www.bobatkins.com/photography/technical/field_of_view.html
# (calculations below apply to rectilinear lenses only, not fisheye)
ValueConv => q{
ToFloat(@val);
return undef unless $val[0] and $val[1];
my $corr = 1;
if ($val[2]) {
my $d = 1000 * $val[2] - $val[0];
$corr += $val[0]/$d if $d > 0;
}
my $fd2 = atan2(36, 2*$val[0]*$val[1]*$corr);
my @fov = ( $fd2 * 360 / 3.14159 );
if ($val[2] and $val[2] > 0 and $val[2] < 10000) {
push @fov, 2 * $val[2] * sin($fd2) / cos($fd2);
}
return join(' ', @fov);
},
PrintConv => q{
my @v = split(' ',$val);
my $str = sprintf("%.1f deg", $v[0]);
$str .= sprintf(" (%.2f m)", $v[1]) if $v[1];
return $str;
},
},
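    # (FOV worked example, for illustration only: a 50 mm lens on a full-frame camera
    # with ScaleFactor35efl = 1 and no focus distance gives 2*atan(36/100) = 39.6 deg)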
# generate DateTimeOriginal from Date and Time Created if not extracted already
DateTimeOriginal => {
Condition => 'not defined $$self{VALUE}{DateTimeOriginal}',
Description => 'Date/Time Original',
Groups => { 2 => 'Time' },
Desire => {
0 => 'DateTimeCreated',
1 => 'DateCreated',
2 => 'TimeCreated',
},
RawConv => '($val[1] and $val[2]) ? $val : undef',
ValueConv => q{
return $val[0] if $val[0] and $val[0]=~/ /;
return "$val[1] $val[2]";
},
PrintConv => '$self->ConvertDateTime($val)',
},
ThumbnailImage => {
Groups => { 0 => 'EXIF', 1 => 'IFD1', 2 => 'Preview' },
Writable => 1,
WriteGroup => 'All',
WriteCheck => '$self->CheckImage(\$val)',
WriteAlso => {
# (the 0xfeedfeed values are translated in the Exif write routine)
ThumbnailOffset => 'defined $val ? 0xfeedfeed : undef',
ThumbnailLength => 'defined $val ? 0xfeedfeed : undef',
},
Require => {
0 => 'ThumbnailOffset',
1 => 'ThumbnailLength',
},
Notes => q{
this tag is writable, and may be used to update existing thumbnails, but may
only create a thumbnail in IFD1 of certain types of files. Note that for
this and other Composite embedded-image tags the family 0 and 1 groups match
those of the originating tags
},
# retrieve the thumbnail from our EXIF data
RawConv => q{
@grps = $self->GetGroup($$val{0}); # set groups from ThumbnailOffsets
Image::ExifTool::Exif::ExtractImage($self,$val[0],$val[1],"ThumbnailImage");
},
},
ThumbnailTIFF => {
Groups => { 2 => 'Preview' },
Require => {
0 => 'SubfileType',
1 => 'Compression',
2 => 'ImageWidth',
3 => 'ImageHeight',
4 => 'BitsPerSample',
5 => 'PhotometricInterpretation',
6 => 'StripOffsets',
7 => 'SamplesPerPixel',
8 => 'RowsPerStrip',
9 => 'StripByteCounts',
},
Desire => {
10 => 'PlanarConfiguration',
11 => 'Orientation',
},
# rebuild the TIFF thumbnail from our EXIF data
RawConv => q{
my $tiff;
($tiff, @grps) = Image::ExifTool::Exif::RebuildTIFF($self, @val);
return $tiff;
},
},
PreviewImage => {
Groups => { 0 => 'EXIF', 1 => 'SubIFD', 2 => 'Preview' },
Writable => 1,
WriteGroup => 'All',
WriteCheck => '$self->CheckImage(\$val)',
DelCheck => '$val = ""; return undef', # can't delete, so set to empty string
WriteAlso => {
PreviewImageStart => 'defined $val ? 0xfeedfeed : undef',
PreviewImageLength => 'defined $val ? 0xfeedfeed : undef',
PreviewImageValid => 'defined $val and length $val ? 1 : 0', # (for Olympus)
},
Require => {
0 => 'PreviewImageStart',
1 => 'PreviewImageLength',
},
Desire => {
2 => 'PreviewImageValid',
            # (DNG and A100 ARW may have 2 preview images)
3 => 'PreviewImageStart (1)',
4 => 'PreviewImageLength (1)',
},
Notes => q{
this tag is writable, and may be used to update existing embedded images,
but not create or delete them
},
# note: extract 2nd preview, but ignore double-referenced preview
# (in A100 ARW images, the 2nd PreviewImageLength from IFD0 may be wrong anyway)
RawConv => q{
if ($val[3] and $val[4] and $val[0] ne $val[3]) {
my %val = (
0 => 'PreviewImageStart (1)',
1 => 'PreviewImageLength (1)',
2 => 'PreviewImageValid',
);
$self->FoundTag($tagInfo, \%val);
}
return undef if defined $val[2] and not $val[2];
@grps = $self->GetGroup($$val{0});
return Image::ExifTool::Exif::ExtractImage($self,$val[0],$val[1],'PreviewImage');
},
},
JpgFromRaw => {
Groups => { 0 => 'EXIF', 1 => 'SubIFD', 2 => 'Preview' },
Writable => 1,
WriteGroup => 'All',
WriteCheck => '$self->CheckImage(\$val)',
        # Note: ExifTool 10.38 had disabled the ability to delete this tag;
        # the DelCheck was added in 10.61 to re-enable deletion
DelCheck => '$val = ""; return undef', # can't delete, so set to empty string
WriteAlso => {
JpgFromRawStart => 'defined $val ? 0xfeedfeed : undef',
JpgFromRawLength => 'defined $val ? 0xfeedfeed : undef',
},
Require => {
0 => 'JpgFromRawStart',
1 => 'JpgFromRawLength',
},
Notes => q{
this tag is writable, and may be used to update existing embedded images,
but not create or delete them
},
RawConv => q{
@grps = $self->GetGroup($$val{0});
return Image::ExifTool::Exif::ExtractImage($self,$val[0],$val[1],"JpgFromRaw");
},
},
OtherImage => {
Groups => { 0 => 'EXIF', 1 => 'SubIFD', 2 => 'Preview' },
Writable => 1,
WriteGroup => 'All',
WriteCheck => '$self->CheckImage(\$val)',
DelCheck => '$val = ""; return undef', # can't delete, so set to empty string
WriteAlso => {
OtherImageStart => 'defined $val ? 0xfeedfeed : undef',
OtherImageLength => 'defined $val ? 0xfeedfeed : undef',
},
Require => {
0 => 'OtherImageStart',
1 => 'OtherImageLength',
},
Notes => q{
this tag is writable, and may be used to update existing embedded images,
but not create or delete them
},
# retrieve the thumbnail from our EXIF data
RawConv => q{
@grps = $self->GetGroup($$val{0});
Image::ExifTool::Exif::ExtractImage($self,$val[0],$val[1],"OtherImage");
},
},
PreviewImageSize => {
Require => {
0 => 'PreviewImageWidth',
1 => 'PreviewImageHeight',
},
ValueConv => '"$val[0]x$val[1]"',
},
SubSecDateTimeOriginal => {
Description => 'Date/Time Original',
Groups => { 2 => 'Time' },
Writable => 1,
Shift => 0, # don't shift this tag
Require => {
0 => 'EXIF:DateTimeOriginal',
},
Desire => {
1 => 'SubSecTimeOriginal',
2 => 'OffsetTimeOriginal',
},
WriteAlso => {
'EXIF:DateTimeOriginal' => '($val and $val=~/^(\d{4}:\d{2}:\d{2} \d{2}:\d{2}:\d{2})/) ? $1 : undef',
'EXIF:SubSecTimeOriginal' => '($val and $val=~/\.(\d+)/) ? $1 : undef',
'EXIF:OffsetTimeOriginal' => '($val and $val=~/([-+]\d{2}:\d{2}|Z)$/) ? ($1 eq "Z" ? "+00:00" : $1) : undef',
},
%subSecConv,
},
SubSecCreateDate => {
Description => 'Create Date',
Groups => { 2 => 'Time' },
Writable => 1,
Shift => 0, # don't shift this tag
Require => {
0 => 'EXIF:CreateDate',
},
Desire => {
1 => 'SubSecTimeDigitized',
2 => 'OffsetTimeDigitized',
},
WriteAlso => {
'EXIF:CreateDate' => '($val and $val=~/^(\d{4}:\d{2}:\d{2} \d{2}:\d{2}:\d{2})/) ? $1 : undef',
'EXIF:SubSecTimeDigitized' => '($val and $val=~/\.(\d+)/) ? $1 : undef',
'EXIF:OffsetTimeDigitized' => '($val and $val=~/([-+]\d{2}:\d{2}|Z)$/) ? ($1 eq "Z" ? "+00:00" : $1) : undef',
},
%subSecConv,
},
SubSecModifyDate => {
Description => 'Modify Date',
Groups => { 2 => 'Time' },
Writable => 1,
Shift => 0, # don't shift this tag
Require => {
0 => 'EXIF:ModifyDate',
},
Desire => {
1 => 'SubSecTime',
2 => 'OffsetTime',
},
WriteAlso => {
'EXIF:ModifyDate' => '($val and $val=~/^(\d{4}:\d{2}:\d{2} \d{2}:\d{2}:\d{2})/) ? $1 : undef',
'EXIF:SubSecTime' => '($val and $val=~/\.(\d+)/) ? $1 : undef',
'EXIF:OffsetTime' => '($val and $val=~/([-+]\d{2}:\d{2}|Z)$/) ? ($1 eq "Z" ? "+00:00" : $1) : undef',
},
%subSecConv,
},
CFAPattern => {
Require => {
0 => 'CFARepeatPatternDim',
1 => 'CFAPattern2',
},
# generate CFAPattern
ValueConv => q{
my @a = split / /, $val[0];
my @b = split / /, $val[1];
return '?' unless @a==2 and @b==$a[0]*$a[1];
return "$a[0] $a[1] @b";
},
PrintConv => 'Image::ExifTool::Exif::PrintCFAPattern($val)',
},
RedBalance => {
Groups => { 2 => 'Camera' },
Desire => {
0 => 'WB_RGGBLevels',
1 => 'WB_RGBGLevels',
2 => 'WB_RBGGLevels',
3 => 'WB_GRBGLevels',
4 => 'WB_GRGBLevels',
5 => 'WB_GBRGLevels',
6 => 'WB_RGBLevels',
7 => 'WB_GRBLevels',
8 => 'WB_RBLevels',
9 => 'WBRedLevel', # red
10 => 'WBGreenLevel',
},
ValueConv => 'Image::ExifTool::Exif::RedBlueBalance(0,@val)',
PrintConv => 'int($val * 1e6 + 0.5) * 1e-6',
},
BlueBalance => {
Groups => { 2 => 'Camera' },
Desire => {
0 => 'WB_RGGBLevels',
1 => 'WB_RGBGLevels',
2 => 'WB_RBGGLevels',
3 => 'WB_GRBGLevels',
4 => 'WB_GRGBLevels',
5 => 'WB_GBRGLevels',
6 => 'WB_RGBLevels',
7 => 'WB_GRBLevels',
8 => 'WB_RBLevels',
9 => 'WBBlueLevel', # blue
10 => 'WBGreenLevel',
},
ValueConv => 'Image::ExifTool::Exif::RedBlueBalance(1,@val)',
PrintConv => 'int($val * 1e6 + 0.5) * 1e-6',
},
GPSPosition => {
Groups => { 2 => 'Location' },
Writable => 1,
Protected => 1,
WriteAlso => {
GPSLatitude => '$val =~ /(.*?)( ?[NS])?,/ ? $1 : undef',
GPSLatitudeRef => '$val =~ /(-?)(.*?) ?([NS]?),/ ? ($3 || ($1 ? "S" : "N")) : undef',
GPSLongitude => '$val =~ /, ?(.*?)( ?[EW]?)$/ ? $1 : undef',
GPSLongitudeRef => '$val =~ /, ?(-?)(.*?) ?([EW]?)$/ ? ($3 || ($1 ? "W" : "E")) : undef',
},
PrintConvInv => q{
return undef unless $val =~ /(.*? ?[NS]?), ?(.*? ?[EW]?)$/;
my ($lat, $lon) = ($1, $2);
require Image::ExifTool::GPS;
$lat = Image::ExifTool::GPS::ToDegrees($lat, 1, "lat");
$lon = Image::ExifTool::GPS::ToDegrees($lon, 1, "lon");
return "$lat, $lon";
},
Require => {
0 => 'GPSLatitude',
1 => 'GPSLongitude',
},
Priority => 0,
Notes => q{
when written, writes GPSLatitude, GPSLatitudeRef, GPSLongitude and
GPSLongitudeRef. This tag may be written using the same coordinate
format as provided by Google Maps when right-clicking on a location
},
ValueConv => '(length($val[0]) or length($val[1])) ? "$val[0] $val[1]" : undef',
PrintConv => '"$prt[0], $prt[1]"',
},
LensID => {
Groups => { 2 => 'Camera' },
Require => 'LensType',
Desire => {
1 => 'FocalLength',
2 => 'MaxAperture',
3 => 'MaxApertureValue',
4 => 'MinFocalLength',
5 => 'MaxFocalLength',
6 => 'LensModel',
7 => 'LensFocalRange',
8 => 'LensSpec',
9 => 'LensType2',
10 => 'LensType3',
11 => 'LensFocalLength', # (for Pentax to check for converter)
12 => 'RFLensType',
},
Notes => q{
attempt to identify the actual lens from all lenses with a given LensType.
Applies only to LensType values with a lookup table. May be configured
by adding user-defined lenses
},
# this LensID is only valid if the LensType has a PrintConv or is a model name
RawConv => q{
my $printConv = $$self{TAG_INFO}{LensType}{PrintConv};
return $val if ref $printConv eq 'HASH' or (ref $printConv eq 'ARRAY' and
ref $$printConv[0] eq 'HASH') or $val[0] =~ /(mm|\d\/F)/;
return undef;
},
ValueConv => '$val',
PrintConv => q{
my $pcv;
# use LensType2 instead of LensType if available and valid (Sony E-mount lenses)
# (0x8000 or greater; 0 for several older/3rd-party E-mount lenses)
if (defined $val[9] and ($val[9] & 0x8000 or $val[9] == 0)) {
$val[0] = $val[9];
$prt[0] = $prt[9];
# Particularly GM lenses: often LensType2=0 but LensType3 is available and valid: use LensType3.
if ($val[9] == 0 and $val[10] & 0x8000) {
$val[0] = $val[10];
$prt[0] = $prt[10];
}
$pcv = $$self{TAG_INFO}{LensType2}{PrintConv};
}
# use Canon RFLensType if available
if ($val[12]) {
$val[0] = $val[12];
$prt[0] = $prt[12];
$pcv = $$self{TAG_INFO}{RFLensType}{PrintConv};
}
my $lens = Image::ExifTool::Exif::PrintLensID($self, $prt[0], $pcv, $prt[8], @val);
# check for use of lens converter (Pentax K-3)
if ($val[11] and $val[1] and $lens) {
my $conv = $val[1] / $val[11];
$lens .= sprintf(' + %.1fx converter', $conv) if $conv > 1.1;
}
return $lens;
},
},
'LensID-2' => {
Name => 'LensID',
Groups => { 2 => 'Camera' },
Desire => {
0 => 'LensModel',
1 => 'Lens',
2 => 'XMP-aux:LensID',
3 => 'Make',
},
Inhibit => {
4 => 'Composite:LensID',
},
RawConv => q{
return undef if defined $val[2] and defined $val[3];
return $val if defined $val[0] and $val[0] =~ /(mm|\d\/F)/;
return $val if defined $val[1] and $val[1] =~ /(mm|\d\/F)/;
return undef;
},
ValueConv => q{
return $val[0] if defined $val[0] and $val[0] =~ /(mm|\d\/F)/;
return $val[1];
},
PrintConv => '$_=$val; s/(\d)\/F/$1mm F/; s/mmF/mm F/; s/(\d) mm/${1}mm/; s/ - /-/; $_',
},
);
# table for unknown IFD entries
%Image::ExifTool::Exif::Unknown = (
GROUPS => { 0 => 'EXIF', 1 => 'UnknownIFD', 2 => 'Image'},
WRITE_PROC => \&WriteExif,
);
# add our composite tags
Image::ExifTool::AddCompositeTags('Image::ExifTool::Exif');
#------------------------------------------------------------------------------
# AutoLoad our writer routines when necessary
#
sub AUTOLOAD
{
return Image::ExifTool::DoAutoLoad($AUTOLOAD, @_);
}
#------------------------------------------------------------------------------
# Identify RAW file type for some TIFF-based formats using Compression value
# Inputs: 0) ExifTool object reference, 1) Compression value
# - sets TIFF_TYPE and FileType if identified
sub IdentifyRawFile($$)
{
my ($et, $comp) = @_;
if ($$et{FILE_TYPE} eq 'TIFF' and not $$et{IdentifiedRawFile}) {
if ($compression{$comp} and $compression{$comp} =~ /^\w+ ([A-Z]{3}) Compressed$/) {
$et->OverrideFileType($$et{TIFF_TYPE} = $1);
$$et{IdentifiedRawFile} = 1;
}
}
}
#------------------------------------------------------------------------------
# Calculate LV (Light Value)
# Inputs: 0) Aperture, 1) ShutterSpeed, 2) ISO
# Returns: LV value (and converts input values to floating point if necessary)
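# Example (for illustration only): Aperture 2.8, ShutterSpeed 1/60 s (0.0167) and
# ISO 100 give log2(2.8*2.8*100 / (0.0167*100)) = approximately 8.9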
sub CalculateLV($$$)
{
local $_;
# do validity checks on arguments
return undef unless @_ >= 3;
foreach (@_) {
return undef unless $_ and /([+-]?(?=\d|\.\d)\d*(\.\d*)?([Ee]([+-]?\d+))?)/ and $1 > 0;
$_ = $1; # extract float from any other garbage
}
# (A light value of 0 is defined as f/1.0 at 1 second with ISO 100)
return log($_[0] * $_[0] * 100 / ($_[1] * $_[2])) / log(2);
}
#------------------------------------------------------------------------------
# Calculate scale factor for 35mm effective focal length (ref 26/PH)
# Inputs: 0) ExifTool object ref
# 1) Focal length
# 2) Focal length in 35mm format
# 3) Canon digital zoom factor
# 4) Focal plane diagonal size (in mm)
# 5) Sensor size (X and Y in mm)
# 6/7) Focal plane X/Y size (in mm)
#         8) Focal plane resolution units (1=None,2=inches,3=cm,4=mm,5=um)
# 9/10) Focal plane X/Y resolution
# 11/12,13/14...) Image width/height in order of precedence (first valid pair is used)
# Returns: 35mm conversion factor (or undefined if it can't be calculated)
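# Example (for illustration only): with no digital zoom, a sensor diagonal of 28.4 mm
# (typical APS-C) gives sqrt(36*36+24*24)/28.4 = approximately 1.5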
sub CalcScaleFactor35efl
{
my $et = shift;
my $res = $_[7]; # save resolution units (in case they have been converted to string)
my $sensXY = $_[4];
Image::ExifTool::ToFloat(@_);
my $focal = shift;
my $foc35 = shift;
return $foc35 / $focal if $focal and $foc35;
my $digz = shift || 1;
my $diag = shift;
my $sens = shift;
# calculate Canon sensor size using a dedicated algorithm
if ($$et{Make} eq 'Canon') {
require Image::ExifTool::Canon;
my $canonDiag = Image::ExifTool::Canon::CalcSensorDiag(
$$et{RATIONAL}{FocalPlaneXResolution},
$$et{RATIONAL}{FocalPlaneYResolution},
);
$diag = $canonDiag if $canonDiag;
}
unless ($diag and Image::ExifTool::IsFloat($diag)) {
if ($sens and $sensXY =~ / (\d+(\.?\d*)?)$/) {
$diag = sqrt($sens * $sens + $1 * $1);
} else {
undef $diag;
my $xsize = shift;
my $ysize = shift;
if ($xsize and $ysize) {
# validate by checking aspect ratio because FocalPlaneX/YSize is not reliable
my $a = $xsize / $ysize;
if (abs($a-1.3333) < .1 or abs($a-1.5) < .1) {
$diag = sqrt($xsize * $xsize + $ysize * $ysize);
}
}
}
unless ($diag) {
# get number of mm in units (assume inches unless otherwise specified)
            my %lkup = ( 3=>10, 4=>1, 5=>0.001, cm=>10, mm=>1, um=>0.001 );
my $units = $lkup{ shift() || $res || '' } || 25.4;
my $x_res = shift || return undef;
my $y_res = shift || $x_res;
Image::ExifTool::IsFloat($x_res) and $x_res != 0 or return undef;
Image::ExifTool::IsFloat($y_res) and $y_res != 0 or return undef;
my ($w, $h);
for (;;) {
@_ < 2 and return undef;
$w = shift;
$h = shift;
next unless $w and $h;
my $a = $w / $h;
last if $a > 0.5 and $a < 2; # stop if we get a reasonable value
}
# calculate focal plane size in mm
$w *= $units / $x_res;
$h *= $units / $y_res;
$diag = sqrt($w*$w+$h*$h);
# make sure size is reasonable
return undef unless $diag > 1 and $diag < 100;
}
}
return sqrt(36*36+24*24) * $digz / $diag;
}
#------------------------------------------------------------------------------
# Print exposure compensation fraction
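# Examples (for illustration only): 0.333333 => '+1/3', -0.5 => '-1/2', 0 => '0'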
sub PrintFraction($)
{
my $val = shift;
my $str;
if (defined $val) {
$val *= 1.00001; # avoid round-off errors
if (not $val) {
$str = '0';
} elsif (int($val)/$val > 0.999) {
$str = sprintf("%+d", int($val));
} elsif ((int($val*2))/($val*2) > 0.999) {
$str = sprintf("%+d/2", int($val * 2));
} elsif ((int($val*3))/($val*3) > 0.999) {
$str = sprintf("%+d/3", int($val * 3));
} else {
$str = sprintf("%+.3g", $val);
}
}
return $str;
}
#------------------------------------------------------------------------------
# Convert fraction or number to floating point value (or 'undef' or 'inf')
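# Examples (for illustration only): '1/250' => 0.004, '1/0' => 'inf', '0/0' => 'undef'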
sub ConvertFraction($)
{
my $val = shift;
if ($val =~ m{([-+]?\d+)/(\d+)}) {
$val = $2 ? $1 / $2 : ($1 ? 'inf' : 'undef');
}
return $val;
}
#------------------------------------------------------------------------------
# Convert EXIF text to something readable
# Inputs: 0) ExifTool object reference, 1) EXIF text,
# 2) [optional] 1 to apply CharsetEXIF to ASCII text,
# 3) tag name for warning message (may be argument 2)
# Returns: text encoded according to Charset option (with trailing spaces removed)
sub ConvertExifText($$;$$)
{
my ($et, $val, $asciiFlex, $tag) = @_;
return $val if length($val) < 8;
my $id = substr($val, 0, 8);
my $str = substr($val, 8);
my $type;
delete $$et{WrongByteOrder};
if ($$et{OPTIONS}{Validate} and $id =~ /^(ASCII|UNICODE|JIS)?\0* \0*$/) {
$et->Warn(($1 || 'Undefined') . ' text header' . ($tag ? " for $tag" : '') . ' has spaces instead of nulls');
}
# Note: allow spaces instead of nulls in the ID codes because
# it is fairly common for camera manufacturers to get this wrong
# (also handle Canon ZoomBrowser EX 4.5 null followed by 7 bytes of garbage)
if ($id =~ /^(ASCII)?(\0|[\0 ]+$)/) {
# truncate at null terminator (shouldn't have a null based on the
# EXIF spec, but it seems that few people actually read the spec)
$str =~ s/\0.*//s;
# allow ASCII text to contain any other specified encoding
if ($asciiFlex and $asciiFlex eq '1') {
my $enc = $et->Options('CharsetEXIF');
$str = $et->Decode($str, $enc) if $enc;
}
# by the EXIF spec, the following string should be "UNICODE\0", but
# apparently Kodak sometimes uses "Unicode\0" in the APP3 "Meta" information.
# However, unfortunately Ricoh uses "Unicode\0" in the RR30 EXIF UserComment
# when the text is actually ASCII, so only recognize uppercase "UNICODE\0".
} elsif ($id =~ /^(UNICODE)[\0 ]$/) {
$type = $1;
# MicrosoftPhoto writes as little-endian even in big-endian EXIF,
# so we must guess at the true byte ordering
$str = $et->Decode($str, 'UTF16', 'Unknown');
} elsif ($id =~ /^(JIS)[\0 ]{5}$/) {
$type = $1;
$str = $et->Decode($str, 'JIS', 'Unknown');
} else {
$tag = $asciiFlex if $asciiFlex and $asciiFlex ne '1';
$et->Warn('Invalid EXIF text encoding' . ($tag ? " for $tag" : ''));
$str = $id . $str;
}
if ($$et{WrongByteOrder} and $$et{OPTIONS}{Validate}) {
$et->Warn('Wrong byte order for EXIF' . ($tag ? " $tag" : '') .
($type ? " $type" : '') . ' text');
}
$str =~ s/ +$//; # trim trailing blanks
return $str;
}
#------------------------------------------------------------------------------
# Print conversion for SpatialFrequencyResponse
sub PrintSFR($)
{
my $val = shift;
return $val unless length $val > 4;
my ($n, $m) = (Get16u(\$val, 0), Get16u(\$val, 2));
my @cols = split /\0/, substr($val, 4), $n+1;
my $pos = length($val) - 8 * $n * $m;
return $val unless @cols == $n+1 and $pos >= 4;
pop @cols;
my ($i, $j);
for ($i=0; $i<$n; ++$i) {
my @rows;
for ($j=0; $j<$m; ++$j) {
push @rows, Image::ExifTool::GetRational64u(\$val, $pos + 8*($i+$j*$n));
}
        $cols[$i] .= '=' . join(',',@rows);
}
return join '; ', @cols;
}
#------------------------------------------------------------------------------
# Print numerical parameter value (with sign, or 'Normal' for zero)
# Inputs: 0) value, 1) flag for inverse conversion, 2) conversion hash reference
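# Examples (for illustration only): 2 => '+2', 0xffff => -1 (a negative value in disguise)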
sub PrintParameter($$$)
{
my ($val, $inv, $conv) = @_;
return $val if $inv;
if ($val > 0) {
if ($val > 0xfff0) { # a negative value in disguise?
$val = $val - 0x10000;
} else {
$val = "+$val";
}
}
return $val;
}
#------------------------------------------------------------------------------
# Convert parameter back to standard EXIF value
# 0,0.00,etc or "Normal" => 0
# -1,-2,etc or "Soft" or "Low" => 1
# +1,+2,1,2,etc or "Hard" or "High" => 2
sub ConvertParameter($)
{
my $val = shift;
my $isFloat = Image::ExifTool::IsFloat($val);
# normal is a value of zero
return 0 if $val =~ /\bn/i or ($isFloat and $val == 0);
# "soft", "low" or any negative number is a value of 1
return 1 if $val =~ /\b(s|l)/i or ($isFloat and $val < 0);
# "hard", "high" or any positive number is a value of 2
return 2 if $val =~ /\bh/i or $isFloat;
return undef;
}
#------------------------------------------------------------------------------
# Calculate Red/BlueBalance
# Inputs: 0) 0=red, 1=blue, 1-9) WB_RGGB/RGBG/RBGG/GRBG/GRGB/GBRG/RGB/GRB/RBLevels,
#         10) red or blue level, 11) green level
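# Example (for illustration only): WB_RGGBLevels '2217 1024 1024 1631' gives a
# RedBalance of 2217/1024 = 2.165 and a BlueBalance of 1631/1024 = 1.593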
my @rggbLookup = (
# indices for R, G, G and B components in input value
[ 0, 1, 2, 3 ], # 0 RGGB
[ 0, 1, 3, 2 ], # 1 RGBG
[ 0, 2, 3, 1 ], # 2 RBGG
[ 1, 0, 3, 2 ], # 3 GRBG
[ 1, 0, 2, 3 ], # 4 GRGB
[ 2, 3, 0, 1 ], # 5 GBRG
[ 0, 1, 1, 2 ], # 6 RGB
[ 1, 0, 0, 2 ], # 7 GRB
[ 0, 256, 256, 1 ], # 8 RB (green level is 256)
);
sub RedBlueBalance($@)
{
my $blue = shift;
my ($i, $val, $levels);
for ($i=0; $i<@rggbLookup; ++$i) {
$levels = shift or next;
my @levels = split ' ', $levels;
next if @levels < 2;
my $lookup = $rggbLookup[$i];
my $g = $$lookup[1]; # get green level or index
if ($g < 4) {
next if @levels < 3;
$g = ($levels[$g] + $levels[$$lookup[2]]) / 2 or next;
} elsif ($levels[$$lookup[$blue * 3]] < 4) {
$g = 1; # Some Nikon cameras use a scaling factor of 1 (E5700)
}
$val = $levels[$$lookup[$blue * 3]] / $g;
last;
}
$val = $_[0] / $_[1] if not defined $val and ($_[0] and $_[1]);
return $val;
}
#------------------------------------------------------------------------------
# Print exposure time as a fraction
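# Examples (for illustration only): 0.008 => '1/125', 0.5 => '0.5', 2 => '2'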
sub PrintExposureTime($)
{
my $secs = shift;
return $secs unless Image::ExifTool::IsFloat($secs);
if ($secs < 0.25001 and $secs > 0) {
return sprintf("1/%d",int(0.5 + 1/$secs));
}
$_ = sprintf("%.1f",$secs);
s/\.0$//;
return $_;
}
#------------------------------------------------------------------------------
# Print FNumber
sub PrintFNumber($)
{
my $val = shift;
if (Image::ExifTool::IsFloat($val) and $val > 0) {
# round to 1 decimal place, or 2 for values < 1.0
$val = sprintf(($val<1 ? "%.2f" : "%.1f"), $val);
}
return $val;
}
#------------------------------------------------------------------------------
# Decode raw CFAPattern value
# Inputs: 0) ExifTool ref, 1) binary value
# Returns: string of numbers
sub DecodeCFAPattern($$)
{
my ($self, $val) = @_;
    # some Panasonic cameras (SV-AS3, SV-AS30) write this in ASCII (very odd)
if ($val =~ /^[0-6]+$/) {
$self->Warn('Incorrectly formatted CFAPattern', 1);
$val =~ tr/0-6/\x00-\x06/;
}
return $val unless length($val) >= 4;
my @a = unpack(GetByteOrder() eq 'II' ? 'v2C*' : 'n2C*', $val);
my $end = 2 + $a[0] * $a[1];
if ($end > @a) {
# try swapping byte order (I have seen this order different than in EXIF)
my ($x, $y) = unpack('n2',pack('v2',$a[0],$a[1]));
if (@a < 2 + $x * $y) {
$self->Warn('Invalid CFAPattern', 1);
} else {
($a[0], $a[1]) = ($x, $y);
# (can't technically be wrong because the order isn't well defined by the EXIF spec)
# $self->Warn('Wrong byte order for CFAPattern');
}
}
return "@a";
}
#------------------------------------------------------------------------------
# Print CFA Pattern
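# Example (for illustration only): '2 2 0 1 1 2' => '[Red,Green][Green,Blue]'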
sub PrintCFAPattern($)
{
my $val = shift;
my @a = split ' ', $val;
return '<truncated data>' unless @a >= 2;
return '<zero pattern size>' unless $a[0] and $a[1];
my $end = 2 + $a[0] * $a[1];
return '<invalid pattern size>' if $end > @a;
my @cfaColor = qw(Red Green Blue Cyan Magenta Yellow White);
my ($pos, $rtnVal) = (2, '[');
for (;;) {
$rtnVal .= $cfaColor[$a[$pos]] || 'Unknown';
last if ++$pos >= $end;
($pos - 2) % $a[1] and $rtnVal .= ',', next;
$rtnVal .= '][';
}
return $rtnVal . ']';
}
#------------------------------------------------------------------------------
# Print Opcode List
# Inputs: 0) value, 1) flag for inverse conversion, 2) conversion hash reference
# Returns: converted value
sub PrintOpcode($$$)
{
my ($val, $inv, $conv) = @_;
return undef if $inv; # (can't do inverse conversion)
return '' unless length $$val > 4;
my $num = unpack('N', $$val);
my $pos = 4;
my ($i, @ops);
for ($i=0; $i<$num; ++$i) {
$pos + 16 <= length $$val or push(@ops, '<err>'), last;
my ($op, $ver, $flags, $len) = unpack("x${pos}N4", $$val);
push @ops, $$conv{$op} || "[opcode $op]";
$pos += 16 + $len;
}
return join ', ', @ops;
}
#------------------------------------------------------------------------------
# Print conversion for lens info
# Inputs: 0) string of values (min focal, max focal, min F, max F)
# Returns: string in the form "12-20mm f/3.8-4.5" or "50mm f/1.4"
sub PrintLensInfo($)
{
my $val = shift;
my @vals = split ' ', $val;
return $val unless @vals == 4;
my $c = 0;
foreach (@vals) {
Image::ExifTool::IsFloat($_) and ++$c, next;
$_ eq 'inf' and $_ = '?', ++$c, next;
$_ eq 'undef' and $_ = '?', ++$c, next;
}
return $val unless $c == 4;
$val = $vals[0];
# (the Pentax Q writes zero for upper value of fixed-focal-length lenses)
$val .= "-$vals[1]" if $vals[1] and $vals[1] ne $vals[0];
$val .= "mm f/$vals[2]";
$val .= "-$vals[3]" if $vals[3] and $vals[3] ne $vals[2];
return $val;
}
#------------------------------------------------------------------------------
# Get lens info from lens model string
# Inputs: 0) lens string, 1) flag to allow unknown "?" values
# Returns: 0) min focal, 1) max focal, 2) min aperture, 3) max aperture
# Notes: returns empty list if lens string could not be parsed
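# Example (for illustration only): 'EF 24-105mm f/4L IS USM' => (24, 105, 4, 4)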
sub GetLensInfo($;$)
{
my ($lens, $unk) = @_;
# extract focal length and aperture ranges for this lens
my $pat = '\\d+(?:\\.\\d+)?';
$pat .= '|\\?' if $unk;
return () unless $lens =~ /($pat)(?:-($pat))?\s*mm.*?(?:[fF]\/?\s*)($pat)(?:-($pat))?/;
# ($1=short focal, $2=long focal, $3=max aperture wide, $4=max aperture tele)
my @a = ($1, $2, $3, $4);
$a[1] or $a[1] = $a[0];
$a[3] or $a[3] = $a[2];
if ($unk) {
local $_;
$_ eq '?' and $_ = 'undef' foreach @a;
}
return @a;
}
#------------------------------------------------------------------------------
# Match lens in list of possibilities based on value of LensModel
# Inputs: 0) reference to list of possible models, 1) LensModel string
# - updates list on return; guaranteed not to remove all list entries
sub MatchLensModel($$)
{
my ($try, $lensModel) = @_;
if (@$try > 1 and $lensModel) {
my (@filt, $pat);
# filter by focal length
if ($lensModel =~ /((\d+-)?\d+mm)/) {
my $focal = $1;
@filt = grep /$focal/, @$try;
@$try = @filt if @filt and @filt < @$try;
}
# filter by aperture
if (@$try > 1 and $lensModel =~ m{(?:F/?|1:)(\d+(\.\d+)?)}i) {
my $fnum = $1;
@filt = grep m{(F/?|1:)$fnum(\b|[A-Z])}i, @$try;
@$try = @filt if @filt and @filt < @$try;
}
# filter by model version, and other lens parameters
foreach $pat ('I+', 'USM') {
next unless @$try > 1 and $lensModel =~ /\b($pat)\b/;
my $val = $1;
@filt = grep /\b$val\b/, @$try;
@$try = @filt if @filt and @filt < @$try;
}
}
}
#------------------------------------------------------------------------------
# Attempt to identify the specific lens if multiple lenses have the same LensType
# Inputs: 0) ExifTool object ref, 1) LensType print value, 2) PrintConv hash ref,
# 3) LensSpec print value, 4) LensType numerical value, 5) FocalLength,
# 6) MaxAperture, 7) MaxApertureValue, 8) MinFocalLength, 9) MaxFocalLength,
# 10) LensModel, 11) LensFocalRange, 12) LensSpec
my %sonyEtype;
sub PrintLensID($$@)
{
my ($et, $lensTypePrt, $printConv, $lensSpecPrt, $lensType, $focalLength,
$maxAperture, $maxApertureValue, $shortFocal, $longFocal, $lensModel,
$lensFocalRange, $lensSpec) = @_;
# this logic relies on the LensType lookup:
return undef unless defined $lensType;
# get print conversion hash if necessary
$printConv or $printConv = $$et{TAG_INFO}{LensType}{PrintConv};
# just copy LensType PrintConv value if it was a lens name
# (Olympus or Panasonic -- just exclude things like Nikon and Leaf LensType)
unless (ref $printConv eq 'HASH') {
if (ref $printConv eq 'ARRAY' and ref $$printConv[0] eq 'HASH') {
$printConv = $$printConv[0];
$lensTypePrt =~ s/;.*//;
$lensType =~ s/ .*//;
} else {
return $lensTypePrt if $lensTypePrt =~ /mm/;
return $lensTypePrt if $lensTypePrt =~ s/(\d)\/F/$1mm F/;
return undef;
}
}
# get LensSpec information if available (Sony)
my ($sf0, $lf0, $sa0, $la0);
if ($lensSpecPrt) {
($sf0, $lf0, $sa0, $la0) = GetLensInfo($lensSpecPrt);
undef $sf0 unless $sa0; # (make sure aperture isn't zero)
}
# use MaxApertureValue if MaxAperture is not available
$maxAperture = $maxApertureValue unless $maxAperture;
if ($lensFocalRange and $lensFocalRange =~ /^(\d+)(?: (?:to )?(\d+))?$/) {
($shortFocal, $longFocal) = ($1, $2 || $1);
}
if ($$et{Make} eq 'SONY') {
if ($lensType eq 65535) {
# handle Sony E-type lenses when LensType2 isn't valid (NEX/ILCE models only)
if ($$et{Model} =~ /NEX|ILCE/) {
unless (%sonyEtype) {
my ($index, $i, %did, $lens);
require Image::ExifTool::Sony;
foreach (sort keys %Image::ExifTool::Sony::sonyLensTypes2) {
($lens = $Image::ExifTool::Sony::sonyLensTypes2{$_}) =~ s/ or .*//;
next if $did{$lens};
($i, $index) = $index ? ("65535.$index", $index + 1) : (65535, 1);
$did{$sonyEtype{$i} = $lens} = 1;
}
}
$printConv = \%sonyEtype;
}
} elsif ($lensType != 0xff00) {
# Patch for Metabones or other adapters on Sony E-mount cameras (ref Jos Roost)
# Metabones Canon EF to E-mount adapters add 0xef00, 0xbc00 or 0x7700 to the
# high byte for 2-byte Canon LensType values, so we need to adjust for these.
# Offset 0xef00 is also used by Sigma MC-11, Fotodiox and Viltrox EF-E adapters.
# Have to exclude A-mount Sigma Filtermatic with 'odd' LensType=0xff00.
require Image::ExifTool::Minolta;
if ($Image::ExifTool::Minolta::metabonesID{$lensType & 0xff00}) {
$lensType -= ($lensType >= 0xef00 ? 0xef00 : $lensType >= 0xbc00 ? 0xbc00 : 0x7700);
require Image::ExifTool::Canon;
$printConv = \%Image::ExifTool::Canon::canonLensTypes;
$lensTypePrt = $$printConv{$lensType} if $$printConv{$lensType};
# Test for Sigma MC-11 SA-E adapter with Sigma SA lens using 0x4900 offset.
# (upper limit of test cuts off two highest Sigma lenses, but prevents
# conflict with old Minolta 25xxx and higher ID's)
} elsif ($lensType >= 0x4900 and $lensType <= 0x590a) {
require Image::ExifTool::Sigma;
$lensType -= 0x4900;
$printConv = \%Image::ExifTool::Sigma::sigmaLensTypes;
$lensTypePrt = $$printConv{$lensType} if $$printConv{$lensType};
}
}
# (Min/MaxFocalLength may report the current focal length for Tamron zoom lenses)
} elsif ($shortFocal and $longFocal and (not $lensModel or $lensModel !~ /^TAMRON.*-\d+mm/)) {
# Canon (and some other makes) include makernote information
# which allows better lens identification
require Image::ExifTool::Canon;
return Image::ExifTool::Canon::PrintLensID($printConv, $lensType,
$shortFocal, $longFocal, $maxAperture, $lensModel);
}
my $lens = $$printConv{$lensType};
return ($lensModel || $lensTypePrt) unless $lens;
return $lens unless $$printConv{"$lensType.1"};
$lens =~ s/ or .*//s; # remove everything after "or"
# make list of all possible matching lenses
my @lenses = ( $lens );
my $i;
for ($i=1; $$printConv{"$lensType.$i"}; ++$i) {
push @lenses, $$printConv{"$lensType.$i"};
}
# attempt to determine actual lens
my (@matches, @best, @user, $diff);
foreach $lens (@lenses) {
push @user, $lens if $Image::ExifTool::userLens{$lens};
# sf = short focal
# lf = long focal
# sa = max aperture at short focal
# la = max aperture at long focal
my ($sf, $lf, $sa, $la) = GetLensInfo($lens);
next unless $sf;
# check against LensSpec parameters if available
if ($sf0) {
next if abs($sf - $sf0) > 0.5 or abs($sa - $sa0) > 0.15 or
abs($lf - $lf0) > 0.5 or abs($la - $la0) > 0.15;
# the basic parameters match, but also check against additional lens features:
# for Sony A and E lenses, the full LensSpec string should match with end of LensType,
# excluding any part between () at the end, and preceded by a space (the space
# ensures that e.g. Zeiss Loxia 21mm having LensSpec "E 21mm F2.8" will not be
# identified as "Sony FE 21mm F2.8 (SEL28F20 + SEL075UWC)")
$lensSpecPrt and $lens =~ / \Q$lensSpecPrt\E( \(| GM$|$)/ and @best = ( $lens ), last;
# exactly-matching Sony lens should have been found above, so only add non-Sony lenses
push @best, $lens unless $lens =~ /^Sony /;
next;
}
# adjust focal length and aperture if teleconverter is attached (Minolta)
if ($lens =~ / \+ .*? (\d+(\.\d+)?)x( |$)/) {
$sf *= $1; $lf *= $1;
$sa *= $1; $la *= $1;
}
# see if we can rule out this lens using FocalLength and MaxAperture
if ($focalLength) {
next if $focalLength < $sf - 0.5;
next if $focalLength > $lf + 0.5;
}
if ($maxAperture) {
# it seems that most manufacturers set MaxAperture and MaxApertureValue
# to the maximum aperture (smallest F number) for the current focal length
# of the lens, so assume that MaxAperture varies with focal length and find
# the closest match (this is somewhat contrary to the EXIF specification which
# states "The smallest F number of the lens", without mention of focal length)
next if $maxAperture < $sa - 0.15; # (0.15 is arbitrary)
next if $maxAperture > $la + 0.15;
# now determine the best match for this aperture
my $aa; # approximate maximum aperture at this focal length
if ($sf == $lf or $sa == $la or $focalLength <= $sf) {
# either 1) prime lens, 2) fixed-aperture zoom, or 3) zoom at min focal
$aa = $sa;
} elsif ($focalLength >= $lf) {
$aa = $la;
} else {
# assume a log-log variation of max aperture with focal length
# (see http://regex.info/blog/2006-10-05/263)
$aa = exp(log($sa) + (log($la)-log($sa)) / (log($lf)-log($sf)) *
(log($focalLength)-log($sf)));
# a linear relationship between 1/FocalLength and 1/MaxAperture fits Sony better (ref 27)
#$aa = 1 / (1/$sa + (1/$focalLength - 1/$sf) * (1/$la - 1/$sa) / (1/$lf - 1/$sf));
}
my $d = abs($maxAperture - $aa);
if (defined $diff) {
$d > $diff + 0.15 and next; # (0.15 is arbitrary)
$d < $diff - 0.15 and undef @best;
}
$diff = $d;
push @best, $lens;
}
push @matches, $lens;
}
# return the user-defined lens if it exists
if (@user) {
# choose the best match if we have more than one
if (@user > 1) {
my ($try, @good);
foreach $try (\@best, \@matches) {
$Image::ExifTool::userLens{$_} and push @good, $_ foreach @$try;
return join(' or ', @good) if @good;
}
}
return join(' or ', @user);
}
# return the best match(es) from the possible lenses, after checking against LensModel
@best = @matches unless @best;
if (@best) {
MatchLensModel(\@best, $lensModel);
return join(' or ', @best);
}
$lens = $$printConv{$lensType};
return $lensModel if $lensModel and $lens =~ / or /; # (eg. Sony NEX-5N)
return $lens;
}
#------------------------------------------------------------------------------
# Translate date into standard EXIF format
# Inputs: 0) date
# Returns: date in format '2003:10:22'
# - bad formats recognized: '2003-10-22','2003/10/22','2003 10 22','20031022'
# - removes null terminator if it exists
sub ExifDate($)
{
my $date = shift;
$date =~ s/\0$//; # remove any null terminator
# separate year:month:day with colons
# (have seen many other characters, including nulls, used erroneously)
$date =~ s/(\d{4})[^\d]*(\d{2})[^\d]*(\d{2})$/$1:$2:$3/;
return $date;
}
#------------------------------------------------------------------------------
# Translate time into standard EXIF format
# Inputs: 0) time
# Returns: time in format '10:30:55'
# - bad formats recognized: '10 30 55', '103055', '103055+0500'
# - removes null terminator if it exists
# - leaves time zone intact if specified (eg. '10:30:55+05:00')
sub ExifTime($)
{
my $time = shift;
$time =~ tr/ /:/; # use ':' (not ' ') as a separator
$time =~ s/\0$//; # remove any null terminator
# add separators if they don't exist
$time =~ s/^(\d{2})(\d{2})(\d{2})/$1:$2:$3/;
$time =~ s/([+-]\d{2})(\d{2})\s*$/$1:$2/; # to timezone too
return $time;
}
#------------------------------------------------------------------------------
# Generate TIFF file from scratch (in current byte order)
# Inputs: 0) hash of IFD entries (TagID => Value; multiple values space-delimited)
# 1) raw image data reference
# Returns: TIFF image data, or undef on error
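# Note: called by RebuildTIFF() below with a minimal entry set, roughly of the form
# { 0x100 => $width, 0x101 => $height, 0x111 => 0, ... }, plus a reference to the
# raw strip data (see the %entries hash in RebuildTIFF for the full list of tags)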
sub GenerateTIFF($$)
{
my ($entries, $dataPt) = @_;
my ($rtnVal, $tag, $offsetPos);
my $num = scalar keys %$entries;
my $ifdBuff = GetByteOrder() . Set16u(42) . Set32u(8) . Set16u($num);
my $valBuff = '';
my $tagTablePtr = GetTagTable('Image::ExifTool::Exif::Main');
foreach $tag (sort { $a <=> $b } keys %$entries) {
my $tagInfo = $$tagTablePtr{$tag};
my $fmt = ref $tagInfo eq 'HASH' ? $$tagInfo{Writable} : 'int32u';
return undef unless defined $fmt;
my $val = Image::ExifTool::WriteValue($$entries{$tag}, $fmt, -1);
return undef unless defined $val;
my $format = $formatNumber{$fmt};
$ifdBuff .= Set16u($tag) . Set16u($format) . Set32u(length($val)/$formatSize[$format]);
$offsetPos = length($ifdBuff) if $tag == 0x111; # (remember StripOffsets position)
if (length $val > 4) {
$ifdBuff .= Set32u(10 + 12 * $num + 4 + length($valBuff));
$valBuff .= $val;
} else {
$val .= "\0" x (4 - length($val)) if length $val < 4;
$ifdBuff .= $val;
}
}
$ifdBuff .= "\0\0\0\0"; # (no IFD1)
return undef unless $offsetPos;
Set32u(length($ifdBuff) + length($valBuff), \$ifdBuff, $offsetPos);
return $ifdBuff . $valBuff . $$dataPt;
}
#------------------------------------------------------------------------------
# Rebuild TIFF thumbnail(s)/preview(s) into stand-alone files with current byte order
# Inputs: 0) ExifTool ref, 1) SubfileType, 2) Compression, 3) ImageWidth, 4) ImageHeight,
# 5) BitsPerSample, 6) PhotometricInterpretation, 7) StripOffsets, 8) SamplesPerPixel,
#         9) RowsPerStrip, 10) StripByteCounts, 11) PlanarConfiguration, 12) Orientation
# Returns: 0) TIFF image or undef, 1/2) Family 0/1 groups for TIFF preview IFD
sub RebuildTIFF($;@)
{
local $_;
my $et = $_[0];
my $value = $$et{VALUE};
my ($i, $j, $rtn, $grp0, $grp1);
return undef if $$et{FILE_TYPE} eq 'RWZ';
SubFile:
for ($i=0; ; ++$i) {
my $key = 'SubfileType' . ($i ? " ($i)" : '');
last unless defined $$value{$key};
next unless $$value{$key} == 1; # (reduced-resolution image)
my $grp = $et->GetGroup($key, 1);
my $cmp = $et->FindValue('Compression', $grp);
next unless $cmp == 1; # (no compression)
my %vals = (Compression=>$cmp, PlanarConfiguration=>1, Orientation=>1);
foreach (qw(ImageWidth ImageHeight BitsPerSample PhotometricInterpretation
StripOffsets SamplesPerPixel RowsPerStrip StripByteCounts
PlanarConfiguration Orientation))
{
my $val = $et->FindValue($_, $grp);
defined $val and $vals{$_} = $val, next;
next SubFile unless defined $vals{$_};
}
my ($w, $h) = @vals{'ImageWidth', 'ImageHeight'};
my @bits = split ' ', $vals{BitsPerSample};
my $rowBytes = 0;
$rowBytes += $w * int(($_+7)/8) foreach @bits;
my $dat = '';
my @off = split ' ', $vals{StripOffsets};
my @len = split ' ', $vals{StripByteCounts};
# read the image data
for ($j=0; $j<@off; ++$j) {
next SubFile unless $len[$j] == $rowBytes * $vals{RowsPerStrip};
my $tmp = $et->ExtractBinary($off[$j], $len[$j]);
next SubFile unless defined $tmp;
$dat .= $tmp;
}
# generate the TIFF image
my %entries = (
0x0fe => 0, # SubfileType = 0
0x100 => $w, # ImageWidth
0x101 => $h, # ImageHeight
0x102 => $vals{BitsPerSample},# BitsPerSample
0x103 => $vals{Compression},# Compression
0x106 => $vals{PhotometricInterpretation}, # PhotometricInterpretation
0x111 => 0, # StripOffsets (will be adjusted later)
0x112 => $vals{Orientation},# Orientation
0x115 => $vals{SamplesPerPixel}, # SamplesPerPixel
0x116 => $h, # RowsPerStrip
0x117 => $h * $rowBytes, # StripByteCounts
0x11a => 72, # XResolution = 72
0x11b => 72, # YResolution = 72
0x11c => $vals{PlanarConfiguration}, # PlanarConfiguration
0x128 => 2, # ResolutionUnit = 2
);
my $img = GenerateTIFF(\%entries, \$dat);
if (not defined $img) {
$et->Warn('Invalid ' . ($w > 256 ? 'Preview' : 'Thumbnail') . 'TIFF data');
} elsif ($rtn or $w > 256) { # (call it a preview if larger than 256 pixels)
$et->FoundTag('PreviewTIFF', \$img, $et->GetGroup($key));
} else {
$rtn = \$img;
($grp0, $grp1) = $et->GetGroup($key);
}
}
return $rtn unless wantarray;
return ($rtn, $grp0, $grp1);
}
#------------------------------------------------------------------------------
# Extract image from file
# Inputs: 0) ExifTool object reference, 1) data offset (in file), 2) data length
# 3) [optional] tag name
# Returns: Reference to Image if specifically requested or "Binary data" message
# Returns undef if there was an error loading the image
sub ExtractImage($$$$)
{
my ($et, $offset, $len, $tag) = @_;
my $dataPt = \$$et{EXIF_DATA};
my $dataPos = $$et{EXIF_POS};
my $image;
# no image if length is zero, and don't try to extract binary from XMP file
return undef if not $len or $$et{FILE_TYPE} eq 'XMP';
# take data from EXIF block if possible
if (defined $dataPos and $offset>=$dataPos and $offset+$len<=$dataPos+length($$dataPt)) {
$image = substr($$dataPt, $offset-$dataPos, $len);
} else {
$image = $et->ExtractBinary($offset, $len, $tag);
return undef unless defined $image;
# patch for incorrect ThumbnailOffset in some Sony DSLR-A100 ARW images
if ($tag and $tag eq 'ThumbnailImage' and $$et{TIFF_TYPE} eq 'ARW' and
$$et{Model} eq 'DSLR-A100' and $offset < 0x10000 and
$image !~ /^(Binary data|\xff\xd8\xff)/)
{
my $try = $et->ExtractBinary($offset + 0x10000, $len, $tag);
if (defined $try and $try =~ /^\xff\xd8\xff/) {
$image = $try;
$$et{VALUE}{ThumbnailOffset} += 0x10000;
$et->Warn('Adjusted incorrect A100 ThumbnailOffset', 1);
}
}
}
return $et->ValidateImage(\$image, $tag);
}
#------------------------------------------------------------------------------
# Utility routine to return tag ID string for warnings
# Inputs: 0) Tag ID, 1) [optional] TagInfo ref
# Returns: "tag 0xXXXX NAME"
sub TagName($;$)
{
my ($tagID, $tagInfo) = @_;
my $tagName = $tagInfo ? ' '.$$tagInfo{Name} : '';
return sprintf('tag 0x%.4x%s', $tagID, $tagName);
}
#------------------------------------------------------------------------------
# Get class name of next IFD offset for HtmlDump output
# Inputs: 0) ExifTool ref, 1) current class ID
# Returns: 0) new IFD offset name, 1) new class ID including "Offset_" for new offset
# 2) new "Offset_" ID
sub NextOffsetName($;$)
{
my ($et, $id) = @_;
$$et{OffsetNum} = defined $$et{OffsetNum} ? $$et{OffsetNum} + 1 : 0;
my $offName = 'o' . $$et{OffsetNum};
my $sid = "Offset_$offName";
$id = (defined $id ? "$id " : '') . $sid;
return ($offName, $id, $sid);
}
#------------------------------------------------------------------------------
# Process EXIF directory
# Inputs: 0) ExifTool object reference
# 1) Reference to directory information hash
# 2) Pointer to tag table for this directory
# Returns: 1 on success, otherwise returns 0 and sets a Warning
sub ProcessExif($$$)
{
my ($et, $dirInfo, $tagTablePtr) = @_;
my $dataPt = $$dirInfo{DataPt};
my $dataPos = $$dirInfo{DataPos} || 0;
my $dataLen = $$dirInfo{DataLen};
my $dirStart = $$dirInfo{DirStart} || 0;
my $dirLen = $$dirInfo{DirLen} || $dataLen - $dirStart;
my $dirName = $$dirInfo{DirName};
my $base = $$dirInfo{Base} || 0;
my $firstBase = $base;
my $raf = $$dirInfo{RAF};
my $verbose = $et->Options('Verbose');
my $validate = $et->Options('Validate');
my $saveFormat = $et->Options('SaveFormat');
my $htmlDump = $$et{HTML_DUMP};
my $success = 1;
my ($tagKey, $dirSize, $makerAddr, $strEnc, %offsetInfo, $offName, $nextOffName);
my $inMakerNotes = $$tagTablePtr{GROUPS}{0} eq 'MakerNotes';
my $isExif = ($tagTablePtr eq \%Image::ExifTool::Exif::Main);
# set encoding to assume for strings
$strEnc = $et->Options('CharsetEXIF') if $$tagTablePtr{GROUPS}{0} eq 'EXIF';
# ignore non-standard EXIF while in strict MWG compatibility mode
if (($validate or $Image::ExifTool::MWG::strict) and $dirName eq 'IFD0' and
$isExif and $$et{FILE_TYPE} =~ /^(JPEG|TIFF|PSD)$/)
{
my $path = $et->MetadataPath();
unless ($path =~ /^(JPEG-APP1-IFD0|TIFF-IFD0|PSD-EXIFInfo-IFD0)$/) {
if ($Image::ExifTool::MWG::strict) {
$et->Warn("Ignored non-standard EXIF at $path");
return 0;
} else {
$et->Warn("Non-standard EXIF at $path", 1);
}
}
}
# mix htmlDump and Validate into verbose so we can test for all at once
$verbose = -1 if $htmlDump;
$verbose = -2 if $validate and not $verbose;
$dirName eq 'EXIF' and $dirName = $$dirInfo{DirName} = 'IFD0';
$$dirInfo{Multi} = 1 if $dirName =~ /^(IFD0|SubIFD)$/ and not defined $$dirInfo{Multi};
# get a more descriptive name for MakerNote sub-directories
my $dir = $$dirInfo{Name};
$dir = $dirName unless $dir and $inMakerNotes and $dir !~ /^MakerNote/;
my ($numEntries, $dirEnd);
if ($dirStart >= 0 and $dirStart <= $dataLen-2) {
# make sure data is large enough (patches bug in Olympus subdirectory lengths)
$numEntries = Get16u($dataPt, $dirStart);
$dirSize = 2 + 12 * $numEntries;
$dirEnd = $dirStart + $dirSize;
if ($dirSize > $dirLen) {
if (($verbose > 0 or $validate) and not $$dirInfo{SubIFD}) {
my $short = $dirSize - $dirLen;
$$et{INDENT} =~ s/..$//; # keep indent the same
$et->Warn("Short directory size for $dir (missing $short bytes)");
$$et{INDENT} .= '| ';
}
undef $dirSize if $dirEnd > $dataLen; # read from file if necessary
}
}
# read IFD from file if necessary
unless ($dirSize) {
$success = 0;
if ($raf) {
# read the count of entries in this IFD
my $offset = $dirStart + $dataPos;
my ($buff, $buf2);
if ($raf->Seek($offset + $base, 0) and $raf->Read($buff,2) == 2) {
my $len = 12 * Get16u(\$buff,0);
# also read next IFD pointer if available
if ($raf->Read($buf2, $len+4) >= $len) {
$buff .= $buf2;
$dataPt = $$dirInfo{DataPt} = \$buff;
$dataPos = $$dirInfo{DataPos} = $offset;
$dataLen = $$dirInfo{DataLen} = length $buff;
$dirStart = $$dirInfo{DirStart} = 0;
$dirLen = $$dirInfo{DirLen} = length $buff;
$success = 1;
}
}
}
if ($success) {
$numEntries = Get16u($dataPt, $dirStart);
} else {
$et->Warn("Bad $dir directory", $inMakerNotes);
return 0 unless $inMakerNotes and $dirLen >= 14 and $dirStart >= 0 and
$dirStart + $dirLen <= length($$dataPt);
$dirSize = $dirLen;
$numEntries = int(($dirSize - 2) / 12); # read what we can
Set16u($numEntries, $dataPt, $dirStart);
}
$dirSize = 2 + 12 * $numEntries;
$dirEnd = $dirStart + $dirSize;
}
$verbose > 0 and $et->VerboseDir($dirName, $numEntries);
my $bytesFromEnd = $dataLen - $dirEnd;
if ($bytesFromEnd < 4) {
unless ($bytesFromEnd==2 or $bytesFromEnd==0) {
$et->Warn("Illegal $dir directory size ($numEntries entries)");
return 0;
}
}
# fix base offset for maker notes if necessary
if (defined $$dirInfo{MakerNoteAddr}) {
$makerAddr = $$dirInfo{MakerNoteAddr};
delete $$dirInfo{MakerNoteAddr};
if (Image::ExifTool::MakerNotes::FixBase($et, $dirInfo)) {
$base = $$dirInfo{Base};
$dataPos = $$dirInfo{DataPos};
}
}
if ($htmlDump) {
$offName = $$dirInfo{OffsetName};
my $longName = $dir eq 'MakerNotes' ? ($$dirInfo{Name} || $dir) : $dir;
if (defined $makerAddr) {
my $hdrLen = $dirStart + $dataPos + $base - $makerAddr;
$et->HDump($makerAddr, $hdrLen, "MakerNotes header", $longName) if $hdrLen > 0;
}
unless ($$dirInfo{NoDumpEntryCount}) {
$et->HDump($dirStart + $dataPos + $base, 2, "$longName entries",
"Entry count: $numEntries", undef, $offName);
}
my $tip;
my $id = $offName;
if ($bytesFromEnd >= 4) {
my $nxt = ($dir =~ /^(.*?)(\d+)$/) ? $1 . ($2 + 1) : 'Next IFD';
my $off = Get32u($dataPt, $dirEnd);
$tip = sprintf("$nxt offset: 0x%.4x", $off);
($nextOffName, $id) = NextOffsetName($et, $offName) if $off;
}
$et->HDump($dirEnd + $dataPos + $base, 4, "Next IFD", $tip, 0, $id);
}
# patch for Canon EOS 40D firmware 1.0.4 bug (incorrect directory counts)
# (must do this before parsing directory or CameraSettings offset will be suspicious)
if ($inMakerNotes and $$et{Model} eq 'Canon EOS 40D' and $numEntries) {
my $entry = $dirStart + 2 + 12 * ($numEntries - 1);
my $fmt = Get16u($dataPt, $entry + 2);
if ($fmt < 1 or $fmt > 13) {
$et->HDump($entry+$dataPos+$base,12,"[invalid IFD entry]",
"Bad format type: $fmt", 1, $offName);
# adjust the number of directory entries
--$numEntries;
$dirEnd -= 12;
}
}
# make sure that Compression and SubfileType are defined for this IFD (for Condition's)
$$et{Compression} = $$et{SubfileType} = '';
# loop through all entries in an EXIF directory (IFD)
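    # (each 12-byte IFD entry is: tag ID [int16u], format code [int16u],
    #  value count [int32u], then the value itself or an offset to it [int32u])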
my ($index, $valEnd, $offList, $offHash, $mapFmt, @valPos);
$mapFmt = $$tagTablePtr{VARS}{MAP_FORMAT} if $$tagTablePtr{VARS};
my ($warnCount, $lastID) = (0, -1);
for ($index=0; $index<$numEntries; ++$index) {
if ($warnCount > 10) {
$et->Warn("Too many warnings -- $dir parsing aborted", 2) and return 0;
}
my $entry = $dirStart + 2 + 12 * $index;
my $tagID = Get16u($dataPt, $entry);
my $format = Get16u($dataPt, $entry+2);
my $count = Get32u($dataPt, $entry+4);
if ($format < 1 or $format > 13) {
if ($mapFmt and $$mapFmt{$format}) {
$format = $$mapFmt{$format};
} else {
$et->HDump($entry+$dataPos+$base,12,"[invalid IFD entry]",
"Bad format type: $format", 1, $offName);
# warn unless the IFD was just padded with zeros
if ($format or $validate) {
$et->Warn("Bad format ($format) for $dir entry $index", $inMakerNotes);
++$warnCount;
}
# assume corrupted IFD if this is our first entry (except Sony ILCE-7M2 firmware 1.21)
return 0 unless $index or $$et{Model} eq 'ILCE-7M2';
next;
}
}
my $formatStr = $formatName[$format]; # get name of this format
my $valueDataPt = $dataPt;
my $valueDataPos = $dataPos;
my $valueDataLen = $dataLen;
my $valuePtr = $entry + 8; # pointer to value within $$dataPt
my $tagInfo = $et->GetTagInfo($tagTablePtr, $tagID);
my ($origFormStr, $bad, $rational, $subOffName);
# save the EXIF format codes if requested
$$et{SaveFormat}{$saveFormat = $formatStr} = 1 if $saveFormat;
# hack to patch incorrect count in Kodak SubIFD3 tags
if ($count < 2 and ref $$tagTablePtr{$tagID} eq 'HASH' and $$tagTablePtr{$tagID}{FixCount}) {
$offList or ($offList, $offHash) = GetOffList($dataPt, $dirStart, $dataPos,
$numEntries, $tagTablePtr);
my $i = $$offHash{Get32u($dataPt, $valuePtr)};
if (defined $i and $i < $#$offList) {
my $oldCount = $count;
$count = int(($$offList[$i+1] - $$offList[$i]) / $formatSize[$format]);
$origFormStr = $formatName[$format] . '[' . $oldCount . ']' if $oldCount != $count;
}
}
$validate and not $inMakerNotes and Image::ExifTool::Validate::ValidateExif(
$et, $tagTablePtr, $tagID, $tagInfo, $lastID, $dir, $count, $formatStr);
my $size = $count * $formatSize[$format];
my $readSize = $size;
if ($size > 4) {
if ($size > 0x7fffffff and (not $tagInfo or not $$tagInfo{ReadFromRAF})) {
$et->Warn(sprintf("Invalid size (%u) for %s %s",$size,$dir,TagName($tagID,$tagInfo)), $inMakerNotes);
++$warnCount;
next;
}
$valuePtr = Get32u($dataPt, $valuePtr);
if ($validate and not $inMakerNotes) {
my $tagName = TagName($tagID, $tagInfo);
$et->Warn("Odd offset for $dir $tagName", 1) if $valuePtr & 0x01;
if ($valuePtr < 8 || ($valuePtr + $size > length($$dataPt) and
$valuePtr + $size > $$et{VALUE}{FileSize}))
{
$et->Warn("Invalid offset for $dir $tagName");
++$warnCount;
next;
}
if ($valuePtr + $size > $dirStart + $dataPos and $valuePtr < $dirEnd + $dataPos + 4) {
$et->Warn("Value for $dir $tagName overlaps IFD");
}
foreach (@valPos) {
next if $$_[0] >= $valuePtr + $size or $$_[0] + $$_[1] <= $valuePtr;
$et->Warn("Value for $dir $tagName overlaps $$_[2]");
}
push @valPos, [ $valuePtr, $size, $tagName ];
}
# fix valuePtr if necessary
if ($$dirInfo{FixOffsets}) {
my $wFlag;
$valEnd or $valEnd = $dataPos + $dirEnd + 4;
#### eval FixOffsets ($valuePtr, $valEnd, $size, $tagID, $wFlag)
eval $$dirInfo{FixOffsets};
}
my $suspect;
# offset shouldn't point into TIFF header
$valuePtr < 8 and not $$dirInfo{ZeroOffsetOK} and $suspect = $warnCount;
# convert offset to pointer in $$dataPt
if ($$dirInfo{EntryBased} or (ref $$tagTablePtr{$tagID} eq 'HASH' and
$$tagTablePtr{$tagID}{EntryBased}))
{
$valuePtr += $entry;
} else {
$valuePtr -= $dataPos;
}
# value shouldn't overlap our directory
$suspect = $warnCount if $valuePtr < $dirEnd and $valuePtr+$size > $dirStart;
# load value from file if necessary
if ($valuePtr < 0 or $valuePtr+$size > $dataLen) {
# get value by seeking in file if we are allowed
my $buff;
if ($raf) {
# avoid loading large binary data unless necessary
while ($size > BINARY_DATA_LIMIT) {
if ($tagInfo) {
# make large unknown blocks binary data
$$tagInfo{Binary} = 1 if $$tagInfo{Unknown};
last unless $$tagInfo{Binary}; # must read non-binary data
last if $$tagInfo{SubDirectory};
my $lcTag = lc($$tagInfo{Name});
if ($$et{OPTIONS}{Binary} and
not $$et{EXCL_TAG_LOOKUP}{$lcTag})
{
# read binary data if specified unless tagsFromFile won't use it
last unless $$et{TAGS_FROM_FILE} and $$tagInfo{Protected};
}
# must read if tag is specified by name
last if $$et{REQ_TAG_LOOKUP}{$lcTag};
} else {
# must read value if needed for a condition
last if defined $tagInfo;
}
# (note: changing the value without changing $size will cause
# a warning in the verbose output, but we need to maintain the
# proper size for the htmlDump, so we can't change this)
$buff = "Binary data $size bytes";
$readSize = length $buff;
last;
}
# read from file if necessary
unless (defined $buff) {
my $wrn;
my $readFromRAF = ($tagInfo and $$tagInfo{ReadFromRAF});
if (not $raf->Seek($base + $valuePtr + $dataPos, 0)) {
$wrn = "Invalid offset for $dir entry $index";
} elsif ($readFromRAF and $size > BINARY_DATA_LIMIT and
not $$et{REQ_TAG_LOOKUP}{lc $$tagInfo{Name}})
{
$buff = "$$tagInfo{Name} data $size bytes";
$readSize = length $buff;
} elsif ($raf->Read($buff,$size) != $size) {
$wrn = "Error reading value for $dir entry $index";
} elsif ($readFromRAF) {
# seek back to the start of the value
$raf->Seek($base + $valuePtr + $dataPos, 0);
}
if ($wrn) {
$et->Warn($wrn, $inMakerNotes);
return 0 unless $inMakerNotes or $htmlDump;
++$warnCount;
$buff = '' unless defined $buff;
$readSize = length $buff;
$bad = 1;
}
}
$valueDataLen = length $buff;
$valueDataPt = \$buff;
$valueDataPos = $valuePtr + $dataPos;
$valuePtr = 0;
} else {
my ($tagStr, $tmpInfo, $leicaTrailer);
if ($tagInfo) {
$tagStr = $$tagInfo{Name};
$leicaTrailer = $$tagInfo{LeicaTrailer};
} elsif (defined $tagInfo) {
$tmpInfo = $et->GetTagInfo($tagTablePtr, $tagID, \ '', $formatStr, $count);
if ($tmpInfo) {
$tagStr = $$tmpInfo{Name};
$leicaTrailer = $$tmpInfo{LeicaTrailer};
}
}
if ($tagInfo and $$tagInfo{ChangeBase}) {
# adjust base offset for this tag only
#### eval ChangeBase ($dirStart,$dataPos)
my $newBase = eval $$tagInfo{ChangeBase};
$valuePtr += $newBase;
}
$tagStr or $tagStr = sprintf("tag 0x%.4x",$tagID);
# allow PreviewImage to run outside EXIF data
if ($tagStr eq 'PreviewImage' and $$et{RAF}) {
my $pos = $$et{RAF}->Tell();
$buff = $et->ExtractBinary($base + $valuePtr + $dataPos, $size, 'PreviewImage');
$$et{RAF}->Seek($pos, 0);
$valueDataPt = \$buff;
$valueDataPos = $valuePtr + $dataPos;
$valueDataLen = $size;
$valuePtr = 0;
} elsif ($leicaTrailer and $$et{RAF}) {
if ($verbose > 0) {
$et->VPrint(0, "$$et{INDENT}$index) $tagStr --> (outside APP1 segment)\n");
}
if ($et->Options('FastScan')) {
$et->Warn('Ignored Leica MakerNote trailer');
} else {
require Image::ExifTool::Fixup;
$$et{LeicaTrailer} = {
TagInfo => $tagInfo || $tmpInfo,
Offset => $base + $valuePtr + $dataPos,
Size => $size,
Fixup => new Image::ExifTool::Fixup,
};
}
} else {
$et->Warn("Bad offset for $dir $tagStr", $inMakerNotes);
++$warnCount;
}
unless (defined $buff) {
$valueDataPt = '';
$valueDataPos = $valuePtr + $dataPos;
$valueDataLen = 0;
$valuePtr = 0;
$bad = 1;
}
}
}
# warn about suspect offsets if they didn't already cause another warning
if (defined $suspect and $suspect == $warnCount) {
my $tagStr = $tagInfo ? $$tagInfo{Name} : sprintf('tag 0x%.4x', $tagID);
if ($et->Warn("Suspicious $dir offset for $tagStr", $inMakerNotes)) {
++$warnCount;
next unless $verbose;
}
}
}
# treat single unknown byte as int8u
$formatStr = 'int8u' if $format == 7 and $count == 1;
my ($val, $subdir, $wrongFormat);
if ($tagID > 0xf000 and $isExif) {
my $oldInfo = $$tagTablePtr{$tagID};
if ((not $oldInfo or (ref $oldInfo eq 'HASH' and $$oldInfo{Condition} and
not $$oldInfo{PSRaw})) and not $bad)
{
# handle special case of Photoshop RAW tags (0xfde8-0xfe58)
# --> generate tags from the value if possible
$val = ReadValue($valueDataPt,$valuePtr,$formatStr,$count,$readSize);
if (defined $val and $val =~ /(.*): (.*)/) {
my $tag = $1;
$val = $2;
$tag =~ s/'s//; # remove 's (so "Owner's Name" becomes "OwnerName")
$tag =~ tr/a-zA-Z0-9_//cd; # remove unknown characters
if ($tag) {
$tagInfo = {
Name => $tag,
Condition => '$$self{TIFF_TYPE} ne "DCR"',
ValueConv => '$_=$val;s/^.*: //;$_', # remove descr
PSRaw => 1, # (just as flag to avoid adding this again)
};
AddTagToTable($tagTablePtr, $tagID, $tagInfo);
# generate conditional list if a conditional tag already existed
$$tagTablePtr{$tagID} = [ $oldInfo, $tagInfo ] if $oldInfo;
}
}
}
}
if (defined $tagInfo and not $tagInfo) {
if ($bad) {
undef $tagInfo;
} else {
# GetTagInfo() required the value for a Condition
my $tmpVal = substr($$valueDataPt, $valuePtr, $readSize < 128 ? $readSize : 128);
# (use original format name in this call -- $formatStr may have been changed to int8u)
$tagInfo = $et->GetTagInfo($tagTablePtr, $tagID, \$tmpVal,
$formatName[$format], $count);
}
}
# make sure we are handling the 'ifd' format properly
if (($format == 13 or $format == 18) and (not $tagInfo or not $$tagInfo{SubIFD})) {
my $str = sprintf('%s tag 0x%.4x IFD format not handled', $dirName, $tagID);
$et->Warn($str, $inMakerNotes);
}
if (defined $tagInfo) {
my $readFormat = $$tagInfo{Format};
$subdir = $$tagInfo{SubDirectory};
# unless otherwise specified, all SubDirectory data except
# EXIF SubIFD offsets should be unformatted
$readFormat = 'undef' if $subdir and not $$tagInfo{SubIFD} and not $readFormat;
# override EXIF format if specified
if ($readFormat) {
$formatStr = $readFormat;
my $newNum = $formatNumber{$formatStr};
if ($newNum and $newNum != $format) {
$origFormStr = $formatName[$format] . '[' . $count . ']';
$format = $newNum;
$size = $readSize = $$tagInfo{FixedSize} if $$tagInfo{FixedSize};
# adjust number of items for new format size
$count = int($size / $formatSize[$format]);
}
}
# verify that offset-type values are integral
if (($$tagInfo{IsOffset} or $$tagInfo{SubIFD}) and not $intFormat{$formatStr}) {
$et->Warn(sprintf('Wrong format (%s) for %s 0x%.4x %s',$formatStr,$dir,$tagID,$$tagInfo{Name}));
if ($validate) {
$$et{WrongFormat}{"$dir:$$tagInfo{Name}"} = 1;
$offsetInfo{$tagID} = [ $tagInfo, '' ];
}
next unless $verbose;
$wrongFormat = 1;
}
} else {
next unless $verbose;
}
unless ($bad) {
# limit maximum length of data to reformat
# (avoids long delays when processing some corrupted files)
my $warned;
if ($count > 100000 and $formatStr !~ /^(undef|string|binary)$/) {
my $tagName = $tagInfo ? $$tagInfo{Name} : sprintf('tag 0x%.4x', $tagID);
# (count of 196608 is typical for ColorMap)
if ($tagName ne 'TransferFunction' or $count != 196608) {
my $minor = $count > 2000000 ? 0 : 2;
if ($et->Warn("Ignoring $dirName $tagName with excessive count", $minor)) {
next unless $$et{OPTIONS}{HtmlDump};
$warned = 1;
}
}
}
if ($count > 500 and $formatStr !~ /^(undef|string|binary)$/ and
(not $tagInfo or $$tagInfo{LongBinary} or $warned) and not $$et{OPTIONS}{IgnoreMinorErrors})
{
$et->WarnOnce('Not decoding some large array(s). Ignore minor errors to decode', 2) unless $warned;
next if $$et{TAGS_FROM_FILE}; # don't generate bogus value when copying tags
$val = "(large array of $count $formatStr values)";
} else {
# convert according to specified format
$val = ReadValue($valueDataPt,$valuePtr,$formatStr,$count,$readSize,\$rational);
# re-code if necessary
$val = $et->Decode($val, $strEnc) if $strEnc and $formatStr eq 'string' and defined $val;
}
}
if ($verbose) {
my $tval = $val;
# also show as a rational
$tval .= " ($rational)" if defined $rational;
if ($htmlDump) {
my ($tagName, $colName);
if ($tagID == 0x927c and $dirName eq 'ExifIFD') {
$tagName = 'MakerNotes';
} elsif ($tagInfo) {
$tagName = $$tagInfo{Name};
} else {
$tagName = sprintf("Tag 0x%.4x",$tagID);
}
my $dname = sprintf("${dir}-%.2d", $index);
# build our tool tip
$size < 0 and $size = $count * $formatSize[$format];
my $fstr = "$formatName[$format]\[$count]";
$fstr = "$origFormStr read as $fstr" if $origFormStr and $origFormStr ne $fstr;
$fstr .= ' <-- WRONG' if $wrongFormat;
my $tip = sprintf("Tag ID: 0x%.4x\n", $tagID) .
"Format: $fstr\nSize: $size bytes\n";
if ($size > 4) {
my $offPt = Get32u($dataPt,$entry+8);
# (test this with ../pics/{CanonEOS-1D_XMarkIII.hif,PanasonicDC-G9.rw2})
my $actPt = $valuePtr + $valueDataPos + $base - ($$et{EXIF_POS} || 0) + ($$et{BASE_FUDGE} || 0);
$tip .= sprintf("Value offset: 0x%.4x\n", $offPt);
# highlight tag name (red for bad size)
my $style = ($bad or not defined $tval) ? 'V' : 'H';
if ($actPt != $offPt) {
$tip .= sprintf("Actual offset: 0x%.4x\n", $actPt);
my $sign = $actPt < $offPt ? '-' : '';
$tip .= sprintf("Offset base: ${sign}0x%.4x\n", abs($actPt - $offPt));
$style = 'F' if $style eq 'H'; # purple for different offsets
}
if ($$et{EXIF_POS} and not $$et{BASE_FUDGE}) {
$tip .= sprintf("File offset: 0x%.4x\n", $actPt + $$et{EXIF_POS})
}
$colName = "<span class=$style>$tagName</span>";
$colName .= ' <span class=V>(odd)</span>' if $offPt & 0x01;
} else {
$colName = $tagName;
}
$colName .= ' <span class=V>(err)</span>' if $wrongFormat;
$colName .= ' <span class=V>(seq)</span>' if $tagID <= $lastID and not $inMakerNotes;
$lastID = $tagID;
if (not defined $tval) {
$tval = '<bad size/offset>';
} else {
$tval = substr($tval,0,28) . '[...]' if length($tval) > 32;
if ($formatStr =~ /^(string|undef|binary)/) {
# translate non-printable characters
$tval =~ tr/\x00-\x1f\x7f-\xff/./;
} elsif ($tagInfo and Image::ExifTool::IsInt($tval)) {
if ($$tagInfo{IsOffset} or $$tagInfo{SubIFD}) {
$tval = sprintf('0x%.4x', $tval);
my $actPt = $val + $base - ($$et{EXIF_POS} || 0) + ($$et{BASE_FUDGE} || 0);
if ($actPt != $val) {
$tval .= sprintf("\nActual offset: 0x%.4x", $actPt);
my $sign = $actPt < $val ? '-' : '';
$tval .= sprintf("\nOffset base: ${sign}0x%.4x", abs($actPt - $val));
}
} elsif ($$tagInfo{PrintHex}) {
$tval = sprintf('0x%x', $tval);
}
}
}
$tip .= "Value: $tval";
my $id = $offName;
my $sid;
($subOffName, $id, $sid) = NextOffsetName($et, $offName) if $tagInfo and $$tagInfo{SubIFD};
$et->HDump($entry+$dataPos+$base, 12, "$dname $colName", $tip, 1, $id);
next if $valueDataLen < 0; # don't process bad pointer entry
if ($size > 4) {
my $exifDumpPos = $valuePtr + $valueDataPos + $base;
my $flag = 0;
if ($subdir) {
if ($$tagInfo{MakerNotes}) {
$flag = 0x04;
} elsif ($$tagInfo{NestedHtmlDump}) {
$flag = $$tagInfo{NestedHtmlDump} == 2 ? 0x10 : 0x04;
}
}
# add value data block (underlining maker notes data)
$et->HDump($exifDumpPos,$size,"$tagName value",'SAME', $flag, $sid);
}
} else {
if ($tagID <= $lastID and not $inMakerNotes) {
my $str = $tagInfo ? ' '.$$tagInfo{Name} : '';
if ($tagID == $lastID) {
$et->Warn(sprintf('Duplicate tag 0x%.4x%s in %s', $tagID, $str, $dirName));
} else {
$et->Warn(sprintf('Tag ID 0x%.4x%s out of sequence in %s', $tagID, $str, $dirName));
}
}
$lastID = $tagID;
if ($verbose > 0) {
my $fstr = $formatName[$format];
$fstr = "$origFormStr read as $fstr" if $origFormStr;
$et->VerboseInfo($tagID, $tagInfo,
Table => $tagTablePtr,
Index => $index,
Value => $tval,
DataPt => $valueDataPt,
DataPos => $valueDataPos + $base,
Size => $size,
Start => $valuePtr,
Format => $fstr,
Count => $count,
);
}
}
next if not $tagInfo or $wrongFormat;
}
next unless defined $val;
#..............................................................................
# Handle SubDirectory tag types
#
if ($subdir) {
# don't process empty subdirectories
unless ($size) {
unless ($$tagInfo{MakerNotes} or $inMakerNotes) {
$et->Warn("Empty $$tagInfo{Name} data", 1);
}
next;
}
my (@values, $newTagTable, $dirNum, $newByteOrder, $invalid);
my $tagStr = $$tagInfo{Name};
if ($$subdir{MaxSubdirs}) {
@values = split ' ', $val;
# limit the number of subdirectories we parse
my $over = @values - $$subdir{MaxSubdirs};
if ($over > 0) {
$et->Warn("Ignoring $over $tagStr directories");
splice @values, $$subdir{MaxSubdirs};
}
$val = shift @values;
}
if ($$subdir{TagTable}) {
$newTagTable = GetTagTable($$subdir{TagTable});
$newTagTable or warn("Unknown tag table $$subdir{TagTable}"), next;
} else {
$newTagTable = $tagTablePtr; # use existing table
}
# loop through all sub-directories specified by this tag
for ($dirNum=0; ; ++$dirNum) {
my $subdirBase = $base;
my $subdirDataPt = $valueDataPt;
my $subdirDataPos = $valueDataPos;
my $subdirDataLen = $valueDataLen;
my $subdirStart = $valuePtr;
if (defined $$subdir{Start}) {
# set local $valuePtr relative to file $base for eval
my $valuePtr = $subdirStart + $subdirDataPos;
#### eval Start ($valuePtr, $val)
my $newStart = eval($$subdir{Start});
unless (Image::ExifTool::IsInt($newStart)) {
$et->Warn("Bad value for $tagStr");
last;
}
# convert back to relative to $subdirDataPt
$newStart -= $subdirDataPos;
# adjust directory size if necessary
unless ($$tagInfo{SubIFD} or $$subdir{BadOffset}) {
$size -= $newStart - $subdirStart;
}
$subdirStart = $newStart;
}
# this is a pain, but some maker notes are always a specific
# byte order, regardless of the byte order of the file
my $oldByteOrder = GetByteOrder();
$newByteOrder = $$subdir{ByteOrder};
if ($newByteOrder) {
if ($newByteOrder =~ /^Little/i) {
$newByteOrder = 'II';
} elsif ($newByteOrder =~ /^Big/i) {
$newByteOrder = 'MM';
} elsif ($$subdir{OffsetPt}) {
undef $newByteOrder;
warn "Can't have variable byte ordering for SubDirectories using OffsetPt";
last;
} elsif ($subdirStart + 2 <= $subdirDataLen) {
# attempt to determine the byte ordering by checking
# the number of directory entries. This is an int16u
# that should be a reasonable value.
my $num = Get16u($subdirDataPt, $subdirStart);
if ($num & 0xff00 and ($num>>8) > ($num&0xff)) {
# This looks wrong, we shouldn't have this many entries
my %otherOrder = ( II=>'MM', MM=>'II' );
$newByteOrder = $otherOrder{$oldByteOrder};
} else {
$newByteOrder = $oldByteOrder;
}
}
} else {
$newByteOrder = $oldByteOrder;
}
# set base offset if necessary
if ($$subdir{Base}) {
# calculate subdirectory start relative to $base for eval
my $start = $subdirStart + $subdirDataPos;
#### eval Base ($start,$base)
$subdirBase = eval($$subdir{Base}) + $base;
}
# add offset to the start of the directory if necessary
if ($$subdir{OffsetPt}) {
#### eval OffsetPt ($valuePtr)
my $pos = eval $$subdir{OffsetPt};
if ($pos + 4 > $subdirDataLen) {
$et->Warn("Bad $tagStr OffsetPt");
last;
}
SetByteOrder($newByteOrder);
$subdirStart += Get32u($subdirDataPt, $pos);
SetByteOrder($oldByteOrder);
}
if ($subdirStart < 0 or $subdirStart + 2 > $subdirDataLen) {
# convert $subdirStart back to a file offset
if ($raf) {
# reset SubDirectory buffer (we will load it later)
my $buff = '';
$subdirDataPt = \$buff;
$subdirDataLen = $size = length $buff;
} else {
my $msg = "Bad $tagStr SubDirectory start";
if ($verbose > 0) {
if ($subdirStart < 0) {
$msg .= " (directory start $subdirStart is before EXIF start)";
} else {
my $end = $subdirStart + $size;
$msg .= " (directory end is $end but EXIF size is only $subdirDataLen)";
}
}
$et->Warn($msg, $inMakerNotes);
last;
}
}
# must update subdirDataPos if $base changes for this subdirectory
$subdirDataPos += $base - $subdirBase;
# build information hash for new directory
my %subdirInfo = (
Name => $tagStr,
Base => $subdirBase,
DataPt => $subdirDataPt,
DataPos => $subdirDataPos,
DataLen => $subdirDataLen,
DirStart => $subdirStart,
DirLen => $size,
RAF => $raf,
Parent => $dirName,
DirName => $$subdir{DirName},
FixBase => $$subdir{FixBase},
FixOffsets => $$subdir{FixOffsets},
EntryBased => $$subdir{EntryBased},
TagInfo => $tagInfo,
SubIFD => $$tagInfo{SubIFD},
Subdir => $subdir,
OffsetName => $subOffName,
);
# (remember: some cameras incorrectly write maker notes in IFD0)
if ($$tagInfo{MakerNotes}) {
# don't parse makernotes if FastScan > 1
my $fast = $et->Options('FastScan');
last if $fast and $fast > 1;
$subdirInfo{MakerNoteAddr} = $valuePtr + $valueDataPos + $base;
$subdirInfo{NoFixBase} = 1 if defined $$subdir{Base};
}
# set directory IFD name from group name of family 1 if it exists,
# unless the tag is writable as a block in which case group 1 may
# have been set automatically
if ($$tagInfo{Groups} and not $$tagInfo{Writable}) {
$subdirInfo{DirName} = $$tagInfo{Groups}{1};
# number multiple subdirectories
$subdirInfo{DirName} =~ s/\d*$/$dirNum/ if $dirNum;
}
SetByteOrder($newByteOrder); # set byte order for this subdir
# validate the subdirectory if necessary
my $dirData = $subdirDataPt; # set data pointer to be used in eval
#### eval Validate ($val, $dirData, $subdirStart, $size)
my $ok = 0;
if (defined $$subdir{Validate} and not eval $$subdir{Validate}) {
$et->Warn("Invalid $tagStr data", $inMakerNotes);
$invalid = 1;
} else {
if (not $subdirInfo{DirName} and $inMakerNotes) {
$subdirInfo{DirName} = $$tagInfo{Name};
}
# process the subdirectory
$ok = $et->ProcessDirectory(\%subdirInfo, $newTagTable, $$subdir{ProcessProc});
}
# print debugging information if there were errors
if (not $ok and $verbose > 1 and $subdirStart != $valuePtr) {
my $out = $et->Options('TextOut');
printf $out "%s (SubDirectory start = 0x%x)\n", $$et{INDENT}, $subdirStart;
}
SetByteOrder($oldByteOrder); # restore original byte swapping
@values or last;
$val = shift @values; # continue with next subdir
}
my $doMaker = $et->Options('MakerNotes');
next unless $doMaker or $$et{REQ_TAG_LOOKUP}{lc($tagStr)} or $$tagInfo{BlockExtract};
# extract as a block if specified
if ($$tagInfo{MakerNotes}) {
# save maker note byte order (if it was significant and valid)
if ($$subdir{ByteOrder} and not $invalid) {
$$et{MAKER_NOTE_BYTE_ORDER} =
defined ($$et{UnknownByteOrder}) ?
$$et{UnknownByteOrder} : $newByteOrder;
}
if ($doMaker and $doMaker eq '2') {
# extract maker notes without rebuilding (no fixup information)
delete $$et{MAKER_NOTE_FIXUP};
} elsif (not $$tagInfo{NotIFD} or $$tagInfo{IsPhaseOne}) {
# this is a pain, but we must rebuild EXIF-type maker notes to
# include all the value data if data was outside the maker notes
my %makerDirInfo = (
Name => $tagStr,
Base => $base,
DataPt => $valueDataPt,
DataPos => $valueDataPos,
DataLen => $valueDataLen,
DirStart => $valuePtr,
DirLen => $size,
RAF => $raf,
Parent => $dirName,
DirName => 'MakerNotes',
FixOffsets => $$subdir{FixOffsets},
TagInfo => $tagInfo,
);
my $val2;
if ($$tagInfo{IsPhaseOne}) {
$$et{DropTags} = 1;
$val2 = Image::ExifTool::PhaseOne::WritePhaseOne($et, \%makerDirInfo, $newTagTable);
delete $$et{DropTags};
} else {
$makerDirInfo{FixBase} = 1 if $$subdir{FixBase};
# rebuild maker notes (creates $$et{MAKER_NOTE_FIXUP})
$val2 = RebuildMakerNotes($et, \%makerDirInfo, $newTagTable);
}
if (defined $val2) {
$val = $val2;
} elsif ($size > 4) {
$et->Warn('Error rebuilding maker notes (may be corrupt)');
}
}
} else {
# extract this directory as a block if specified
next unless $$tagInfo{Writable};
}
}
#..............................................................................
# convert to absolute offsets if this tag is an offset
#### eval IsOffset ($val, $et)
if ($$tagInfo{IsOffset} and eval $$tagInfo{IsOffset}) {
my $offsetBase = $$tagInfo{IsOffset} eq '2' ? $firstBase : $base;
$offsetBase += $$et{BASE};
# handle offsets which use a wrong base (Minolta A200)
if ($$tagInfo{WrongBase}) {
my $self = $et;
#### eval WrongBase ($self)
$offsetBase += eval $$tagInfo{WrongBase} || 0;
}
my @vals = split(' ',$val);
foreach $val (@vals) {
$val += $offsetBase;
}
$val = join(' ', @vals);
}
if ($validate) {
if ($$tagInfo{OffsetPair}) {
$offsetInfo{$tagID} = [ $tagInfo, $val ];
} elsif ($saveForValidate{$tagID} and $isExif) {
$offsetInfo{$tagID} = $val;
}
}
# save the value of this tag
$tagKey = $et->FoundTag($tagInfo, $val);
if (defined $tagKey) {
# set the group 1 name for tags in specified tables
$et->SetGroup($tagKey, $dirName) if $$tagTablePtr{SET_GROUP1};
# save original components of rational numbers (used when copying)
$$et{RATIONAL}{$tagKey} = $rational if defined $rational;
$$et{TAG_EXTRA}{$tagKey}{G6} = $saveFormat if $saveFormat;
}
}
# validate image data offsets for this IFD
if ($validate and %offsetInfo) {
Image::ExifTool::Validate::ValidateOffsetInfo($et, \%offsetInfo, $$dirInfo{DirName}, $inMakerNotes)
}
# scan for subsequent IFD's if specified
if ($$dirInfo{Multi} and $bytesFromEnd >= 4) {
# use same directory information for trailing directory,
# but change the start location (ProcessDirectory will
# test to make sure we don't reprocess the same dir twice)
my %newDirInfo = %$dirInfo;
$newDirInfo{Multi} = 0; # prevent recursion
$newDirInfo{OffsetName} = $nextOffName;
$$et{INDENT} =~ s/..$//; # keep indent the same
for (;;) {
my $offset = Get32u($dataPt, $dirEnd) or last;
$newDirInfo{DirStart} = $offset - $dataPos;
# increment IFD number
my $ifdNum = $newDirInfo{DirName} =~ s/(\d+)$// ? $1 : 0;
$newDirInfo{DirName} .= $ifdNum + 1;
# must validate SubIFD1 because the nextIFD pointer is invalid for some RAW formats
if ($newDirInfo{DirName} ne 'SubIFD1' or ValidateIFD(\%newDirInfo)) {
my $cur = pop @{$$et{PATH}};
$et->ProcessDirectory(\%newDirInfo, $tagTablePtr) or $success = 0;
push @{$$et{PATH}}, $cur;
if ($success and $newDirInfo{BytesFromEnd} >= 4) {
$dataPt = $newDirInfo{DataPt};
$dataPos = $newDirInfo{DataPos};
$dirEnd = $newDirInfo{DirEnd};
next;
}
} elsif ($verbose or $$et{TIFF_TYPE} eq 'TIFF') {
$et->Warn('Ignored bad IFD linked from SubIFD');
}
last;
}
} elsif (defined $$dirInfo{Multi}) {
# return necessary parameters for parsing next IFD
$$dirInfo{DirEnd} = $dirEnd;
$$dirInfo{OffsetName} = $nextOffName;
$$dirInfo{BytesFromEnd} = $bytesFromEnd;
}
return $success;
}
1; # end
__END__
=head1 NAME
Image::ExifTool::Exif - Read EXIF/TIFF meta information
=head1 SYNOPSIS
This module is required by Image::ExifTool.
=head1 DESCRIPTION
This module contains routines required by Image::ExifTool for processing
EXIF and TIFF meta information.
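The functions here are not usually called directly; a minimal, illustrative
sketch of reaching them through the public Image::ExifTool interface (the
file name and tag names below are placeholders only) is:
    use Image::ExifTool;
    my $et = Image::ExifTool->new;
    # EXIF/TIFF directories are parsed by the routines in this module
    my $info = $et->ImageInfo('image.jpg', 'Make', 'Model', 'DateTimeOriginal');
    printf "%s: %s\n", $_, $$info{$_} foreach sort keys %$info;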
=head1 AUTHOR
Copyright 2003-2022, Phil Harvey (philharvey66 at gmail.com)
This library is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
=head1 REFERENCES
=over 4
=item L<http://www.exif.org/Exif2-2.PDF>
=item L<http://www.cipa.jp/std/documents/e/DC-008-2012_E.pdf>
=item L<http://partners.adobe.com/asn/developer/pdfs/tn/TIFF6.pdf>
=item L<http://partners.adobe.com/public/developer/en/tiff/TIFFPM6.pdf>
=item L<http://www.adobe.com/products/dng/pdfs/dng_spec.pdf>
=item L<http://www.awaresystems.be/imaging/tiff/tifftags.html>
=item L<http://www.remotesensing.org/libtiff/TIFFTechNote2.html>
=item L<http://www.exif.org/dcf.PDF>
=item L<http://park2.wakwak.com/~tsuruzoh/Computer/Digicams/exif-e.html>
=item L<http://www.fine-view.com/jp/lab/doc/ps6ffspecsv2.pdf>
=item L<http://www.ozhiker.com/electronics/pjmt/jpeg_info/meta.html>
=item L<http://hul.harvard.edu/jhove/tiff-tags.html>
=item L<http://www.microsoft.com/whdc/xps/wmphoto.mspx>
=item L<http://www.asmail.be/msg0054681802.html>
=item L<http://crousseau.free.fr/imgfmt_raw.htm>
=item L<http://www.cybercom.net/~dcoffin/dcraw/>
=item L<http://www.digitalpreservation.gov/formats/content/tiff_tags.shtml>
=item L<http://community.roxen.com/developers/idocs/rfc/rfc3949.html>
=item L<http://tools.ietf.org/html/draft-ietf-fax-tiff-fx-extension1-01>
=item L<http://wwwimages.adobe.com/www.adobe.com/content/dam/Adobe/en/devnet/cinemadng/pdfs/CinemaDNG_Format_Specification_v1_1.pdf>
=item L<http://geotiff.maptools.org/spec/geotiffhome.html>
=back
=head1 ACKNOWLEDGEMENTS
Thanks to Jeremy Brown for the 35efl tags, and Matt Madrid for his help with
the XP character code conversions.
=head1 SEE ALSO
L<Image::ExifTool::TagNames/EXIF Tags>,
L<Image::ExifTool(3pm)|Image::ExifTool>
=cut
| 36.914937 | 202 | 0.479637 |
edb6c6c042b1f2f1513672dc800dec5847e50ae6 | 1,355 | pm | Perl | auto-lib/Paws/MediaLive/ResourceConflict.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/MediaLive/ResourceConflict.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/MediaLive/ResourceConflict.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z | # Generated by default/object.tt
package Paws::MediaLive::ResourceConflict;
use Moose;
has Message => (is => 'ro', isa => 'Str', request_name => 'message', traits => ['NameInRequest']);
1;
### main pod documentation begin ###
=head1 NAME
Paws::MediaLive::ResourceConflict
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::MediaLive::ResourceConflict object:
$service_obj->Method(Att1 => { Message => $value, ..., Message => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be a Paws::MediaLive::ResourceConflict object:
$result = $service_obj->Method(...);
$result->Att1->Message
=head1 DESCRIPTION
Placeholder documentation for ResourceConflict
=head1 ATTRIBUTES
=head2 Message => Str
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::MediaLive>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 22.966102 | 104 | 0.734317 |
ed146d162e7520c77aedaeedd58b72f2dfc80094 | 6,207 | pl | Perl | calculate_collinearity_metric.pl | djwcisel/collinearity | c7b005084a9d33926cd2accff106338873e59ed8 | [
"MIT"
] | null | null | null | calculate_collinearity_metric.pl | djwcisel/collinearity | c7b005084a9d33926cd2accff106338873e59ed8 | [
"MIT"
] | null | null | null | calculate_collinearity_metric.pl | djwcisel/collinearity | c7b005084a9d33926cd2accff106338873e59ed8 | [
"MIT"
] | null | null | null | #!/usr/bin/env perl
## author: reubwn May 2017
use strict;
use warnings;
use Getopt::Long;
use List::Util qw /sum/;
my $usage = "
SYNOPSIS
Calculates 'collinearity' score based on the number of collinear genes divided by the total number of genes within that defined block.
Takes the collinearity file and the 'gff' file used in MCScanX analyses.
If ka/ks values are present, eg by running the MCScanX 'add_kaks_to_MCScanX.pl' program first, script will also print average Ka and Ks values per block if -k option is set.
OUTPUT
Prints to a file 'Xyz.collinearity.score'; prints score for each block plus an average.
Also prints a file 'Xyz.collinearity.reformatted', which removes some of the formatting issues in the original MCScanX 'Xyz.collinearity' file.
OPTIONS
-i|--in [FILE] : MCScanX collinearity file
-g|--gff [FILE] : MCScanX GFF file
-k|--kaks : also calculate average Ka & Ks per block
-h|--help : print this message
USAGE
  >> calculate_collinearity_metric.pl -i xyz.collinearity -g xyz.gff
  >> calculate_collinearity_metric.pl -i xyz.collinearity.kaks -g xyz.gff -k
\n";
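## Worked example of the score (illustrative numbers, not from a real run):
## a block of 40 collinear gene pairs spanning 50 genes on chrom1 and 62 genes
## on chrom2 (counted from the gff) gives
##   score_block1 = 40/50 = 0.80000, score_block2 = 40/62 = 0.64516,
##   score_avg = (0.80000 + 0.64516)/2 = 0.72258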
my ($collinearity, $gff, $kaks, $help, $debug);
GetOptions (
'i|in=s' => \$collinearity,
'g|gff=s' => \$gff,
'k|kaks' => \$kaks,
'h|help' => \$help,
'd|debug' => \$debug
);
die $usage if $help;
die $usage unless ($collinearity && $gff);
print STDERR "[INFO] Collinearity file: $collinearity\n";
print STDERR "[INFO] Add average Ka and Ks: TRUE\n" if ($kaks);
my (%blocks,$chrom1,$chrom2,$orientation);
open (my $COL, $collinearity) or die $!;
open (my $REFORMAT, ">$collinearity.reformatted") or die $!;
while (<$COL>) {
chomp;
if ($_ =~ m/^#/) { #this is trying to process the run information.... delete this from the collinearity file?
print $REFORMAT "$_\n";
if ($_ =~ m/(plus|minus)$/) { ## get chrom names and strand orientation of block 2
$orientation = $1;
my @line = split (/\s+/, $_);
my @chroms = split (/&/, $line[-2]); ##accesses second to last element of array, [-1] is the last element
$chrom1 = $chroms[0];
$chrom2 = $chroms[1];
next;
} else {
next;
}
}
$_ =~ s/^\s+|\s+$//g; ##remove leading and trailing whitespaces
my @F = split (m/\s+/, $_);
my $aln_number;
if ($F[0]=~m/\d+\-\d+\:/) { ## sometimes columns not formatted properly... :/
my @a = split (m/\-/, $F[0]);
push @{ $blocks{$a[0]}{block1} }, $F[1];
push @{ $blocks{$a[0]}{block2} }, $F[2];
$aln_number = $a[0];
## print to $REFORMAT; this should be easier to use for downstream analyses
$a[1] =~ s/\://;
my @N = @F;
print $REFORMAT join "\t", @a, splice (@N,1), "\n";
} else {
$F[0] =~ s/\-//;
push @{ $blocks{$F[0]}{block1} }, $F[2];
push @{ $blocks{$F[0]}{block2} }, $F[3];
$aln_number = $F[0];
$F[1] =~ s/\://;
print $REFORMAT join "\t", @F, "\n";
}
## dump genes and plus/minus info into %blocks
$blocks{$aln_number}{chrom1} = $chrom1;
$blocks{$aln_number}{chrom2} = $chrom2;
$blocks{$aln_number}{orientation} = $orientation;
print STDOUT "$aln_number $chrom1 $chrom2 $orientation\n" if $debug;
if ($kaks) {
push @{ $blocks{$aln_number}{ks} }, $F[-1]; ## ks is in final column
push @{ $blocks{$aln_number}{ka} }, $F[-2]; ## ka is in second to last column
}
}
close $COL;
close $REFORMAT;
open (my $OUT, ">$collinearity.score") or die $!;
if ($kaks) {
print $OUT join "\t", "block_num","chrom1","chrom2","collinear_genes","total_genes1","total_genes2","orientation","score_block1","score_block2","score_avg","ka_avg","ks_avg","\n";
} else {
  print $OUT join "\t", "block_num","chrom1","chrom2","collinear_genes1","collinear_genes2","total_genes1","total_genes2","orientation","score_block1","score_block2","score_avg","\n"; ## header matches the 11 values printed per block in this branch
}
foreach (sort {$a<=>$b} keys %blocks) {
## get orientation of block2
my $orientation = $blocks{$_}{orientation};
## get genes of block1
my @block1_genes = @{ $blocks{$_}{block1} };
my $bl1_start = shift @block1_genes;
my $bl1_end = pop @block1_genes;
my $bl1_length = `perl -e 'while (<>){print if (/\t\Q$bl1_start\E\t/../\t\Q$bl1_end\E\t/);}' $gff | wc -l`;
chomp ($bl1_length);
my $score_block1 = sprintf("%.5f",(scalar(@block1_genes)/$bl1_length));
## get genes of block2
my @block2_genes = @{ $blocks{$_}{block2} };
my $bl2_start = shift @block2_genes;
my $bl2_end = pop @block2_genes;
my $bl2_length;
## if block 2 is in minus orientation, need to reverse the search!
if ($orientation eq "plus") {
$bl2_length = `perl -e 'while (<>){print if (/\t\Q$bl2_start\E\t/../\t\Q$bl2_end\E\t/);}' $gff | wc -l`;
} elsif ($orientation eq "minus") {
$bl2_length = `perl -e 'while (<>){print if (/\t\Q$bl2_end\E\t/../\t\Q$bl2_start\E\t/);}' $gff | wc -l`;
} else {
die "\nUnknown strand orientation for block 2: $orientation\n\n";
}
chomp ($bl2_length);
print STDERR "\r[INFO] Calculating scores for block: $_";
my $score_block2 = sprintf("%.5f",(scalar(@block2_genes)/$bl2_length));
## get kaks values if present
if ($kaks) {
my @ka = grep {$_ >= 0} @{ $blocks{$_}{ka} }; ## exclude negative values from calculation;
my @ks = grep {$_ >= 0} @{ $blocks{$_}{ks} }; ## these are "-2", output when ka/ks cannot be calulated for some reason
print $OUT join "\t",
$_,
$blocks{$_}{chrom1},
$blocks{$_}{chrom2},
scalar(@block1_genes),
$bl1_length,
$bl2_length,
$orientation,
$score_block1,
$score_block2,
sprintf("%.5f",(($score_block1+$score_block2)/2)),
sprintf("%.5f",(avg(@ka))),
sprintf("%.5f",(avg(@ks))),
"\n";
} else {
print $OUT join "\t",
$_,
$blocks{$_}{chrom1}, #this was missing
$blocks{$_}{chrom2}, #this was missing
scalar(@block1_genes),
scalar(@block2_genes), #this was missing
$bl1_length,
$bl2_length,
$orientation,
$score_block1,
$score_block2,
sprintf("%.5f",(($score_block1+$score_block2)/2)),
"\n";
}
}
close $OUT;
print STDERR "\n[INFO] Finished on ".`date`."\n";
sub avg {
if (scalar(@_) == 0) {
return '-2';
} else {
return sum(@_)/@_;
}
}
__END__
| 34.292818 | 181 | 0.607057 |
ed8b981e37fc1ae4b9d4f79bf87e5993689bceb5 | 1,400 | t | Perl | test/008-multiple_around.t | LuaDist/lua-coat | 9e3801785460a6b6031bb4ea2f5a34244eda4228 | [
"MIT"
] | null | null | null | test/008-multiple_around.t | LuaDist/lua-coat | 9e3801785460a6b6031bb4ea2f5a34244eda4228 | [
"MIT"
] | null | null | null | test/008-multiple_around.t | LuaDist/lua-coat | 9e3801785460a6b6031bb4ea2f5a34244eda4228 | [
"MIT"
] | 1 | 2022-01-14T10:25:39.000Z | 2022-01-14T10:25:39.000Z | #!/usr/bin/env lua
require 'Coat'
class 'Parent'
function method:orig (...)
local val = ...
table.insert( _G.seen, 'orig : ' .. val )
return 1
end
class 'Child'
extends 'Parent'
function around:orig (func, ...)
local val = ...
table.insert( _G.seen, 'around 1 before : ' .. val)
local result = func(self, ...)
table.insert( _G.seen, 'around 1 after' )
return result + 1
end
function around:orig (func, ...)
local val = ...
table.insert( _G.seen, 'around 2 before : ' .. val)
local result = func(self, ...)
table.insert( _G.seen, 'around 2 after' )
return result + 1
end
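-- Note (illustration only): the most recently declared 'around' wraps the
-- earlier one, so calling c:orig runs around 2 -> around 1 -> orig, and the
-- return value is 1 (orig) + 1 + 1 = 3, which is exactly what the is() and
-- eq_array() assertions below expect.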
require 'Test.More'
plan(9)
if os.getenv "GEN_PNG" and os.execute "dot -V" == 0 then
local f = io.popen("dot -T png -o 008.png", 'w')
f:write(require 'Coat.UML'.to_dot())
f:close()
end
p = Parent.new()
ok( p:isa 'Parent', "Simple" )
ok( p.orig )
_G.seen = {}
p:orig 'val'
eq_array( _G.seen, { 'orig : val' } )
c = Child.new()
ok( c:isa 'Child', "MultipleAround" )
ok( c:isa 'Parent' )
ok( c.orig )
_G.seen = {}
is( c:orig 'val', 3 )
eq_array( _G.seen, {
'around 2 before : val',
'around 1 before : val',
'orig : val',
'around 1 after',
'around 2 after',
} )
error_like([[function Child.around:_orig_ () end]],
"Cannot around non%-existent method _orig_ in class Child")
| 21.212121 | 70 | 0.567143 |
ed624d6ef027c639566d06702864cddb67da9d3a | 4,618 | pm | Perl | auto-lib/Paws/LicenseManager/CheckoutBorrowLicense.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/LicenseManager/CheckoutBorrowLicense.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/LicenseManager/CheckoutBorrowLicense.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z |
package Paws::LicenseManager::CheckoutBorrowLicense;
use Moose;
has CheckoutMetadata => (is => 'ro', isa => 'ArrayRef[Paws::LicenseManager::Metadata]');
has ClientToken => (is => 'ro', isa => 'Str', required => 1);
has DigitalSignatureMethod => (is => 'ro', isa => 'Str', required => 1);
has Entitlements => (is => 'ro', isa => 'ArrayRef[Paws::LicenseManager::EntitlementData]', required => 1);
has LicenseArn => (is => 'ro', isa => 'Str', required => 1);
has NodeId => (is => 'ro', isa => 'Str');
use MooseX::ClassAttribute;
class_has _api_call => (isa => 'Str', is => 'ro', default => 'CheckoutBorrowLicense');
class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::LicenseManager::CheckoutBorrowLicenseResponse');
class_has _result_key => (isa => 'Str', is => 'ro');
1;
### main pod documentation begin ###
=head1 NAME
Paws::LicenseManager::CheckoutBorrowLicense - Arguments for method CheckoutBorrowLicense on L<Paws::LicenseManager>
=head1 DESCRIPTION
This class represents the parameters used for calling the method CheckoutBorrowLicense on the
L<AWS License Manager|Paws::LicenseManager> service. Use the attributes of this class
as arguments to method CheckoutBorrowLicense.
You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to CheckoutBorrowLicense.
=head1 SYNOPSIS
  my $license_manager = Paws->service('LicenseManager');
  my $CheckoutBorrowLicenseResponse =
    $license_manager->CheckoutBorrowLicense(
ClientToken => 'MyClientToken',
DigitalSignatureMethod => 'JWT_PS384',
Entitlements => [
{
Name => 'MyString',
Unit => 'Count'
, # values: Count, None, Seconds, Microseconds, Milliseconds, Bytes, Kilobytes, Megabytes, Gigabytes, Terabytes, Bits, Kilobits, Megabits, Gigabits, Terabits, Percent, Bytes/Second, Kilobytes/Second, Megabytes/Second, Gigabytes/Second, Terabytes/Second, Bits/Second, Kilobits/Second, Megabits/Second, Gigabits/Second, Terabits/Second, Count/Second
Value => 'MyString',
},
...
],
LicenseArn => 'MyArn',
CheckoutMetadata => [
{
Name => 'MyString',
Value => 'MyString',
},
...
], # OPTIONAL
NodeId => 'MyString', # OPTIONAL
);
# Results:
my $CheckoutMetadata = $CheckoutBorrowLicenseResponse->CheckoutMetadata;
my $EntitlementsAllowed =
$CheckoutBorrowLicenseResponse->EntitlementsAllowed;
my $Expiration = $CheckoutBorrowLicenseResponse->Expiration;
my $IssuedAt = $CheckoutBorrowLicenseResponse->IssuedAt;
my $LicenseArn = $CheckoutBorrowLicenseResponse->LicenseArn;
my $LicenseConsumptionToken =
$CheckoutBorrowLicenseResponse->LicenseConsumptionToken;
my $NodeId = $CheckoutBorrowLicenseResponse->NodeId;
my $SignedToken = $CheckoutBorrowLicenseResponse->SignedToken;
# Returns a L<Paws::LicenseManager::CheckoutBorrowLicenseResponse> object.
Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instantiate the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/license-manager/CheckoutBorrowLicense>
=head1 ATTRIBUTES
=head2 CheckoutMetadata => ArrayRef[L<Paws::LicenseManager::Metadata>]
Information about constraints.
=head2 B<REQUIRED> ClientToken => Str
Unique, case-sensitive identifier that you provide to ensure the
idempotency of the request.
=head2 B<REQUIRED> DigitalSignatureMethod => Str
Digital signature method. The possible value is JSON Web Signature
(JWS) algorithm PS384. For more information, see RFC 7518 Digital
Signature with RSASSA-PSS
(https://tools.ietf.org/html/rfc7518#section-3.5).
Valid values are: C<"JWT_PS384">
=head2 B<REQUIRED> Entitlements => ArrayRef[L<Paws::LicenseManager::EntitlementData>]
License entitlements. Partial checkouts are not supported.
=head2 B<REQUIRED> LicenseArn => Str
Amazon Resource Name (ARN) of the license. The license must use the
borrow consumption configuration.
=head2 NodeId => Str
Node ID.
=head1 SEE ALSO
This class forms part of L<Paws>, documenting arguments for method CheckoutBorrowLicense in L<Paws::LicenseManager>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 34.721805 | 357 | 0.709398 |
73e26fe2e7dff02943eff42cd3346b60204c8db5 | 1,344 | pl | Perl | it.unibo.finaltask.testing/maps/fixed/map78.pl | bobcorn/robot-cleaner | 9398de2478d94f08e4b2d9517b0192464d60d9a9 | [
"MIT"
] | null | null | null | it.unibo.finaltask.testing/maps/fixed/map78.pl | bobcorn/robot-cleaner | 9398de2478d94f08e4b2d9517b0192464d60d9a9 | [
"MIT"
] | null | null | null | it.unibo.finaltask.testing/maps/fixed/map78.pl | bobcorn/robot-cleaner | 9398de2478d94f08e4b2d9517b0192464d60d9a9 | [
"MIT"
] | null | null | null | status(cell(0,0),1).
status(cell(0,1),1).
status(cell(0,2),1).
status(cell(0,3),1).
status(cell(0,4),1).
status(cell(0,5),1).
status(cell(0,6),1).
status(cell(0,7),1).
status(cell(1,0),1).
status(cell(1,1),1).
status(cell(1,2),1).
status(cell(1,3),1).
status(cell(1,4),1).
status(cell(1,5),1).
status(cell(1,6),1).
status(cell(1,7),1).
status(cell(2,0),1).
status(cell(2,1),1).
status(cell(2,2),1).
status(cell(2,3),1).
status(cell(2,4),1).
status(cell(2,5),1).
status(cell(2,6),1).
status(cell(2,7),1).
status(cell(3,0),1).
status(cell(3,1),1).
status(cell(3,2),1).
status(cell(3,3),t).
status(cell(3,4),t).
status(cell(3,5),1).
status(cell(3,6),1).
status(cell(3,7),1).
status(cell(4,0),1).
status(cell(4,1),1).
status(cell(4,2),1).
status(cell(4,3),0).
status(cell(4,4),0).
status(cell(4,5),0).
status(cell(4,6),0).
status(cell(4,7),0).
status(cell(5,0),0).
status(cell(5,1),0).
status(cell(5,2),0).
status(cell(5,3),0).
status(cell(5,4),0).
status(cell(5,5),0).
status(cell(5,6),0).
status(cell(5,7),0).
status(cell(6,0),0).
status(cell(6,1),0).
status(cell(6,2),0).
status(cell(6,3),0).
status(cell(6,4),0).
status(cell(6,5),0).
status(cell(6,6),0).
status(cell(6,7),0).
status(cell(7,0),0).
status(cell(7,1),0).
status(cell(7,2),0).
status(cell(7,3),0).
status(cell(7,4),0).
status(cell(7,5),0).
status(cell(7,6),0).
status(cell(7,7),0).
| 20.676923 | 20 | 0.619048 |
ede0808ba0902b67be2f7cbf73c03f6ebe0817f5 | 1,569 | pl | Perl | build-osx.pl | Unity-Technologies/android-jni-bridge | 2462c300a6adb79bd16394a815f5943bb7091d46 | [
"MIT"
] | 44 | 2015-04-21T04:31:26.000Z | 2022-03-10T03:02:03.000Z | build-osx.pl | Unity-Technologies/android-jni-bridge | 2462c300a6adb79bd16394a815f5943bb7091d46 | [
"MIT"
] | 18 | 2015-06-18T10:16:03.000Z | 2022-03-23T08:13:56.000Z | build-osx.pl | Unity-Technologies/android-jni-bridge | 2462c300a6adb79bd16394a815f5943bb7091d46 | [
"MIT"
] | 16 | 2015-06-18T10:07:32.000Z | 2021-12-30T11:41:53.000Z | #!/usr/bin/env perl -w
use Cwd qw( abs_path );
use File::Basename qw( dirname );
use lib dirname(abs_path($0));
use PrepareAndroidSDK;
use File::Path;
use strict;
use warnings;
my $api = "jdk-7";
my @classes = (
'::java::lang::System',
'::java::lang::UnsupportedOperationException'
);
sub BuildOSX
{
my $class_names = join(' ', @classes);
my $threads = 8;
system("make clean") && die("Clean failed");
system("make -j$threads PLATFORM=darwin APINAME=\"$api\" APICLASSES=\"$class_names\"") && die("Failed to make osx library");
}
sub ZipIt
{
system("mkdir -p build/temp/include") && die("Failed to create temp directory.");
# write build info
my $git_info = qx(git symbolic-ref -q HEAD && git rev-parse HEAD);
open(BUILD_INFO_FILE, '>', "build/temp/build.txt") or die("Unable to write build information to build/temp/build.txt");
print BUILD_INFO_FILE "$git_info";
close(BUILD_INFO_FILE);
# create zip
system("cp build/$api/source/*.h build/temp/include") && die("Failed to copy headers.");
system("cd build && jar cf temp/jnibridge.jar bitter") && die("Failed to create java class archive.");
system("cd build/$api && zip ../builds.zip -r darwin/*.a") && die("Failed to package libraries into zip file.");
system("cd build/temp && zip ../builds.zip -r jnibridge.jar build.txt include") && die("Failed to package headers into zip file.");
system("rm -r build/temp") && die("Unable to remove temp directory.");
system("cd test; unzip -o ../build/builds.zip; touch Test.cpp") && die("Unable to prepare for tests");
}
BuildOSX();
ZipIt();
| 32.020408 | 132 | 0.676864 |
ed98ddcd3a4ec4ac4f64bfbbbb6cbdbc423f5cee | 219 | pm | Perl | example-perl-psgi/.cpanm/work/1459817744.11/Dist-CheckConflicts-0.11/t/lib/dist/Bar/Conflicts/Good2.pm | NINGONDA/apcera | 60ccb872a56566bde41594546ba4e97e0d73eae0 | [
"MIT"
] | null | null | null | example-perl-psgi/.cpanm/work/1459817744.11/Dist-CheckConflicts-0.11/t/lib/dist/Bar/Conflicts/Good2.pm | NINGONDA/apcera | 60ccb872a56566bde41594546ba4e97e0d73eae0 | [
"MIT"
] | null | null | null | example-perl-psgi/.cpanm/work/1459817744.11/Dist-CheckConflicts-0.11/t/lib/dist/Bar/Conflicts/Good2.pm | NINGONDA/apcera | 60ccb872a56566bde41594546ba4e97e0d73eae0 | [
"MIT"
] | null | null | null | package Bar::Conflicts::Good2;
use strict;
use warnings;
use Dist::CheckConflicts
-dist => 'Bar',
-conflicts => {
'Bar::Two' => '0.01',
},
-also => [
'Bar::Conflicts::Good3',
];
1;
| 14.6 | 32 | 0.511416 |
ede14719c71714cf8d22b2c666006de42ecccf65 | 2,273 | pl | Perl | examples/0-basic/canvas2.pl | kmx/perl-iup | 0178f76a39f01f750e1406f01c3c2d84a3a48ea9 | [
"MIT",
"Unlicense"
] | 5 | 2015-02-10T02:29:10.000Z | 2020-02-24T18:39:15.000Z | examples/0-basic/canvas2.pl | kmx/perl-iup | 0178f76a39f01f750e1406f01c3c2d84a3a48ea9 | [
"MIT",
"Unlicense"
] | 3 | 2015-04-03T11:55:20.000Z | 2021-03-06T15:27:56.000Z | examples/0-basic/canvas2.pl | kmx/perl-iup | 0178f76a39f01f750e1406f01c3c2d84a3a48ea9 | [
"MIT",
"Unlicense"
] | 2 | 2015-02-06T00:30:28.000Z | 2015-06-15T21:33:56.000Z | # IUP::Canvas example
#
# This example shows how several canvas callbacks are used
# and how the scrollbar works.
use strict;
use warnings;
use IUP ':all';
my %stars = ( );
my $label = IUP::Label->new( TITLE=>'IUP::Canvas');
sub redraw_cb {
my $self = shift;
$self->cdActivate();
$self->cdClear();
for (keys %stars) {
my ($gx, $gy) = split /:/, $_;
my $x = $gx-320*$self->POSX;
my $y = $gy-200*$self->POSY;
$y = $self->cdUpdateYAxis($y);
$self->cdMark($x, $y);
}
return IUP_DEFAULT;
};
sub button_cb {
my ($self, $but, $press, $x, $y) = @_;
my $gx = 320*$self->POSX+$x;
my $gy = 200*$self->POSY+$y;
if ($but == IUP_BUTTON1 && $press == 1) {
$y = $self->cdUpdateYAxis($y);
$self->cdMark($x, $y);
$stars{"$gx:$gy"} = 1;
}
return IUP_DEFAULT;
};
sub scroll_cb {
my $self = shift;
redraw_cb($self);
return IUP_DEFAULT;
}
sub motion_cb {
my ($self, $mx, $my, $r) = @_;
$mx += 320*$self->POSX;
$my += 200*$self->POSY;
$label->TITLE("[$mx,$my]");
return IUP_DEFAULT;
};
sub enter_cb {
my $self = shift;
$self->cdBackground(CD_WHITE);
redraw_cb($self);
return IUP_DEFAULT;
}
sub leave_cb {
my $self = shift;
$self->cdBackground(CD_GRAY);
redraw_cb($self,0.0,0.0);
$label->TITLE('IUP::Canvas');
return IUP_DEFAULT;
}
my $cv = IUP::Canvas->new( CURSOR=>"CROSS", RASTERSIZE=>"320x200",
EXPAND=>"NO", SCROLLBAR=>"YES",
DX=>0.5, DY=>0.5 );
$cv->SetCallback( ACTION=>\&redraw_cb, BUTTON_CB=>\&button_cb,
SCROLL_CB=>\&scroll_cb, MOTION_CB=>\&motion_cb,
ENTERWINDOW_CB=>\&enter_cb, LEAVEWINDOW_CB=>\&leave_cb );
my $dg = IUP::Dialog->new( child=>IUP::Vbox->new([
$cv,
IUP::Hbox->new( child=>[
IUP::Fill->new(),
$label,
IUP::Fill->new()
]),
]), TITLE=>"Welcome to IUP::Canvas demo",
RESIZE=>"NO", MAXBOX=>"NO" );
$dg->ShowXY(IUP_CENTER, IUP_CENTER);
IUP->MainLoop;
| 25.255556 | 76 | 0.487901 |
ed72e170e290db653d5bc645e5282d2d28038c4f | 387 | t | Perl | t/manifest.t | tofjw/Xymon-Plugin-Server | 08b5056db2fe8ee6301f2685cd9a507c49919980 | [
"Artistic-2.0",
"Unlicense"
] | 2 | 2020-08-27T14:23:39.000Z | 2021-06-05T16:45:09.000Z | t/manifest.t | tofjw/Xymon-Plugin-Server | 08b5056db2fe8ee6301f2685cd9a507c49919980 | [
"Artistic-2.0",
"Unlicense"
] | null | null | null | t/manifest.t | tofjw/Xymon-Plugin-Server | 08b5056db2fe8ee6301f2685cd9a507c49919980 | [
"Artistic-2.0",
"Unlicense"
] | null | null | null | #!perl -T
use 5.006;
use strict;
use warnings FATAL => 'all';
use Test::More;
unless ( $ENV{RELEASE_TESTING} ) {
plan( skip_all => "Author tests not required for installation" );
}
my $min_tcm = 0.9;
eval "use Test::CheckManifest $min_tcm";
plan skip_all => "Test::CheckManifest $min_tcm required" if $@;
ok_manifest({ filter => [qr/\.git/, qr/.*~/, qr/.*\.bak/, qr|t/tmp/.*|]});
| 24.1875 | 74 | 0.638243 |
edbfa18c9afe4fc6d8d7b87c96c21f3bf877b04e | 25,466 | pl | Perl | samples/NGPF/RoutingSwitching/BGP/BGP+.pl | jackjack821/ixnetwork-api-pl | b065a0a668023902e93527dce55f66378c17cc65 | [
"MIT"
] | null | null | null | samples/NGPF/RoutingSwitching/BGP/BGP+.pl | jackjack821/ixnetwork-api-pl | b065a0a668023902e93527dce55f66378c17cc65 | [
"MIT"
] | null | null | null | samples/NGPF/RoutingSwitching/BGP/BGP+.pl | jackjack821/ixnetwork-api-pl | b065a0a668023902e93527dce55f66378c17cc65 | [
"MIT"
] | 1 | 2019-10-28T08:09:12.000Z | 2019-10-28T08:09:12.000Z | ################################################################################
# #
# Copyright 1997 - 2019 by IXIA Keysight #
# All Rights Reserved. #
# #
################################################################################
################################################################################
# #
# LEGAL NOTICE: #
# ============== #
# The following code and documentation (hereinafter "the script") is an #
# example script for demonstration purposes only. #
# The script is not a standard commercial product offered by Ixia and have #
# been developed and is being provided for use only as indicated herein. The #
# script [and all modifications enhancements and updates thereto (whether #
# made by Ixia and/or by the user and/or by a third party)] shall at all times #
# remain the property of Ixia. #
# #
# Ixia does not warrant (i) that the functions contained in the script will #
# meet the users requirements or (ii) that the script will be without #
# omissions or error-free. #
# THE SCRIPT IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND AND IXIA #
# DISCLAIMS ALL WARRANTIES EXPRESS IMPLIED STATUTORY OR OTHERWISE #
# INCLUDING BUT NOT LIMITED TO ANY WARRANTY OF MERCHANTABILITY AND FITNESS FOR #
# A PARTICULAR PURPOSE OR OF NON-INFRINGEMENT. #
# THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE SCRIPT IS WITH THE #
# USER. #
# IN NO EVENT SHALL IXIA BE LIABLE FOR ANY DAMAGES RESULTING FROM OR ARISING #
# OUT OF THE USE OF OR THE INABILITY TO USE THE SCRIPT OR ANY PART THEREOF #
# INCLUDING BUT NOT LIMITED TO ANY LOST PROFITS LOST BUSINESS LOST OR #
# DAMAGED DATA OR SOFTWARE OR ANY INDIRECT INCIDENTAL PUNITIVE OR #
# CONSEQUENTIAL DAMAGES EVEN IF IXIA HAS BEEN ADVISED OF THE POSSIBILITY OF #
# SUCH DAMAGES IN ADVANCE. #
# Ixia will not be required to provide any software maintenance or support #
# services of any kind (e.g. any error corrections) in connection with the #
# script or any part thereof. The user acknowledges that although Ixia may #
# from time to time and in its sole discretion provide maintenance or support #
# services for the script any such services are subject to the warranty and #
# damages limitations set forth herein and will not obligate Ixia to provide #
# any additional maintenance or support services. #
# #
################################################################################
################################################################################
# #
# Description: #
# This script intends to demonstrate how to use NGPF BGP API. #
# #
#    1. It will create 2 BGP topologies, each having an ipv6 network           #
# topology and loopback device group behind the network group(NG) with #
# loopback interface on it. A loopback device group(DG) behind network #
# group is needed to support applib traffic. #
# 2. Start the BGP protocol. #
# 3. Retrieve protocol statistics. #
#    4. Enable BGP IPv6 Learned Information Filter on the fly.                 #
# 5. Retrieve protocol learned info. #
#    6. Configure L2-L3 traffic.                                               #
#    7. Configure application traffic.                                         #
#    8. Start the L2-L3 traffic.                                               #
#    9. Start the application traffic.                                         #
#   10. Retrieve Application traffic stats.                                    #
#   11. Retrieve L2-L3 traffic stats.                                          #
#   12. Stop L2-L3 traffic.                                                    #
#   13. Stop Application traffic.                                              #
#   14. Stop all protocols.                                                    #
# #
################################################################################
################################################################################
# Please ensure that PERL5LIB environment variable is set properly so that #
# IxNetwork.pm module is available. IxNetwork.pm is generally available in #
# C:\<IxNetwork Install Path>\API\Perl #
################################################################################
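# For example (paths are illustrative and depend on your installation):
#   Windows:  set PERL5LIB=C:\<IxNetwork Install Path>\API\Perl
#   Linux:    export PERL5LIB=/path/to/IxNetwork/API/Perl
# or add an equivalent "use lib '...';" line before "use IxNetwork;".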
use IxNetwork;
use strict;
sub assignPorts {
my @my_resource = @_;
my $ixNet = $my_resource[0];
my $chassis1 = $my_resource[1];
my $card1 = $my_resource[2];
my $port1 = $my_resource[3];
my $chassis2 = $my_resource[4];
my $card2 = $my_resource[5];
my $port2 = $my_resource[6];
my $vport1 = $my_resource[7];
my $vport2 = $my_resource[8];
my $root = $ixNet->getRoot();
my $chassisObj1 = $ixNet->add($root.'/availableHardware', 'chassis');
$ixNet->setAttribute($chassisObj1, '-hostname', $chassis1);
$ixNet->commit();
$chassisObj1 = ($ixNet->remapIds($chassisObj1))[0];
my $chassisObj2 = '';
if ($chassis1 ne $chassis2) {
$chassisObj2 = $ixNet->add($root.'/availableHardware', 'chassis');
$ixNet->setAttribute($chassisObj2, '-hostname', $chassis2);
$ixNet->commit();
$chassisObj2 = ($ixNet->remapIds($chassisObj2))[0];
} else {
$chassisObj2 = $chassisObj1;
}
my $cardPortRef1 = $chassisObj1.'/card:'.$card1.'/port:'.$port1;
$ixNet->setMultiAttribute($vport1, '-connectedTo', $cardPortRef1,
'-rxMode', 'captureAndMeasure', '-name', 'Ethernet - 001');
$ixNet->commit();
my $cardPortRef2 = $chassisObj2.'/card:'.$card2.'/port:'.$port2;
$ixNet->setMultiAttribute($vport2, '-connectedTo', $cardPortRef2,
'-rxMode', 'captureAndMeasure', '-name', 'Ethernet - 002');
$ixNet->commit();
}
# Script Starts
print("!!! Test Script Starts !!!\n");
# Edit this variables values to match your setup
my $ixTclServer = '10.205.25.97';
my $ixTclPort = '8009';
my @ports = (('10.205.28.63', '10', '13'), ('10.205.28.63', '10', '14'));
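# Note: Perl flattens the nested parentheses above, so @ports is a plain
# six-element list (chassis1, card1, port1, chassis2, card2, port2) that
# assignPorts() consumes positionally after $ixNet.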
# Spawn a new instance of IxNetwork object.
my $ixNet = new IxNetwork();
print("Connect to IxNetwork Tcl server\n");
$ixNet->connect($ixTclServer, '-port', $ixTclPort, '-version', '7.40',
'-setAttribute', 'strict');
print("Creating a new config\n");
$ixNet->execute('newConfig');
print("Adding 2 vports\n");
$ixNet->add($ixNet->getRoot(), 'vport');
$ixNet->add($ixNet->getRoot(), 'vport');
$ixNet->commit();
my @vPorts = $ixNet->getList($ixNet->getRoot(), 'vport');
my $vportTx = $vPorts[0];
my $vportRx = $vPorts[1];
assignPorts($ixNet, @ports, $vportTx, $vportRx);
sleep(5);
print("Adding 2 topologies\n");
$ixNet->add($ixNet->getRoot(), 'topology', '-vports', $vportTx);
$ixNet->add($ixNet->getRoot(), 'topology', '-vports', $vportRx);
$ixNet->commit();
my @topologies = $ixNet->getList($ixNet->getRoot(), 'topology');
my $topo1 = $topologies[0];
my $topo2 = $topologies[1];
print("Adding 2 device groups\n");
$ixNet->add($topo1, 'deviceGroup');
$ixNet->add($topo2, 'deviceGroup');
$ixNet->commit();
my @t1devices = $ixNet->getList($topo1, 'deviceGroup');
my @t2devices = $ixNet->getList($topo2, 'deviceGroup');
my $t1dev1 = $t1devices[0];
my $t2dev1 = $t2devices[0];
print("Configuring the multipliers (number of sessions)\n");
$ixNet->setAttribute($t1dev1, '-multiplier', '1');
$ixNet->setAttribute($t2dev1, '-multiplier', '1');
$ixNet->commit();
print("Adding ethernet/mac endpoints\n");
$ixNet->add($t1dev1, 'ethernet');
$ixNet->add($t2dev1, 'ethernet');
$ixNet->commit();
my $mac1 = ($ixNet->getList($t1dev1, 'ethernet'))[0];
my $mac2 = ($ixNet->getList($t2dev1, 'ethernet'))[0];
print("Configuring the mac addresses\n");
$ixNet->setMultiAttribute($ixNet->getAttribute($mac1, '-mac').'/counter',
'-direction', 'increment',
'-start', '18:03:73:C7:6C:B1',
'-step', '00:00:00:00:00:01');
$ixNet->setAttribute($ixNet->getAttribute($mac2, '-mac').'/singleValue',
'-value', '18:03:73:C7:6C:01');
$ixNet->commit();
# puts "ixNet help ::ixNet::OBJ-/topology/deviceGroup/ethernet"
# puts "[ixNet help ::ixNet::OBJ-/topology/deviceGroup/ethernet]"
print("Add ipv6\n");
$ixNet->add($mac1, 'ipv6');
$ixNet->add($mac2, 'ipv6');
$ixNet->commit();
my $ip1 = ($ixNet->getList($mac1, 'ipv6'))[0];
my $ip2 = ($ixNet->getList($mac2, 'ipv6'))[0];
my $mvAdd1 = $ixNet->getAttribute($ip1, '-address');
my $mvAdd2 = $ixNet->getAttribute($ip2, '-address');
my $mvGw1 = $ixNet->getAttribute($ip1, '-gatewayIp');
my $mvGw2 = $ixNet->getAttribute($ip2, '-gatewayIp');
print("Configuring ipv6 addresses\n");
$ixNet->setAttribute($mvAdd1.'/singleValue', '-value', '11:0:0:0:0:0:0:1');
$ixNet->setAttribute($mvAdd2.'/singleValue', '-value', '11:0:0:0:0:0:0:2');
$ixNet->setAttribute($mvGw1.'/singleValue', '-value', '11:0:0:0:0:0:0:2');
$ixNet->setAttribute($mvGw2.'/singleValue', '-value', '11:0:0:0:0:0:0:1');
$ixNet->setAttribute($ixNet->getAttribute($ip1, '-prefix').'/singleValue', '-value', '64');
$ixNet->setAttribute($ixNet->getAttribute($ip2, '-prefix').'/singleValue', '-value', '64');
$ixNet->setMultiAttribute($ixNet->getAttribute($ip1, '-resolveGateway').'/singleValue', '-value', 'true');
$ixNet->setMultiAttribute($ixNet->getAttribute($ip2, '-resolveGateway').'/singleValue', '-value', 'true');
$ixNet->commit();
# puts "ixNet help ::ixNet::OBJ-/topology/deviceGroup/ethernet/ipv4"
# puts "[ixNet help ::ixNet::OBJ-/topology/deviceGroup/ethernet/ipv4]"
print("Adding BGP+ over IPv6 stacks\n");
$ixNet->add($ip1, 'bgpIpv6Peer');
$ixNet->add($ip2, 'bgpIpv6Peer');
$ixNet->commit();
my $bgp1 = ($ixNet->getList($ip1, 'bgpIpv6Peer'))[0];
my $bgp2 = ($ixNet->getList($ip2, 'bgpIpv6Peer'))[0];
print("Renaming the topologies and the device groups\n");
$ixNet->setAttribute($topo1, '-name', 'BGP+ Topology 1');
$ixNet->setAttribute($topo2, '-name', 'BGP+ Topology 2');
$ixNet->setAttribute($t1dev1, '-name', 'BGP+ Topology 1 Router');
$ixNet->setAttribute($t2dev1, '-name', 'BGP+ Topology 2 Router');
$ixNet->commit();
print("Adding NetworkGroup behind BGP+ DG\n");
$ixNet->execute('createDefaultStack', $t1dev1, 'ipv6PrefixPools');
$ixNet->execute('createDefaultStack', $t2dev1, 'ipv6PrefixPools');
my $networkGroup1 = ($ixNet->getList($t1dev1, 'networkGroup'))[0];
my $networkGroup2 = ($ixNet->getList($t2dev1, 'networkGroup'))[0];
$ixNet->setAttribute($networkGroup1, '-name', 'BGP+_1_Network_Group1');
$ixNet->setAttribute($networkGroup2, '-name', 'BGP+_2_Network_Group1');
$ixNet->commit();
print("Setting IPs in BGP+ DUT IP tab\n");
$ixNet->setAttribute($ixNet->getAttribute($bgp1, '-dutIp').'/singleValue', '-value', '11:0:0:0:0:0:0:2');
$ixNet->setAttribute($ixNet->getAttribute($bgp2, '-dutIp').'/singleValue', '-value', '11:0:0:0:0:0:0:1');
$ixNet->commit();
# Add ipv6 loopback1 for applib traffic
print("Adding ipv6 loopback1 for applib traffic\n");
my $chainedDg1 = $ixNet->add($networkGroup1, 'deviceGroup');
$ixNet->setMultiAttribute($chainedDg1, '-multiplier', '1', '-name', 'Device Group 4');
$ixNet->commit();
$chainedDg1 = ($ixNet->remapIds($chainedDg1))[0];
my $loopback1 = $ixNet->add($chainedDg1, 'ipv6Loopback');
$ixNet->setMultiAttribute($loopback1, '-stackedLayers', '', '-name', 'IPv6 Loopback 2');
$ixNet->commit();
my $addressSet1 = $ixNet->getAttribute($loopback1, '-address');
$ixNet->setMultiAttribute($addressSet1, '-clearOverlays', 'false', '-pattern', 'counter');
$ixNet->commit();
$addressSet1 = $ixNet->add($addressSet1, 'counter');
$ixNet->setMultiAttribute($addressSet1, '-step', '0:0:0:0:0:0:0:1',
'-start', '3000:0:1:1:0:0:0:0', '-direction', 'increment');
$ixNet->commit();
my $addressSet1 = ($ixNet->remapIds($addressSet1))[0];
# Add ipv6 loopback2 for applib traffic
print("Adding ipv6 loopback2 for applib traffic\n");
my $chainedDg2 = $ixNet->add($networkGroup2, 'deviceGroup');
$ixNet->setMultiAttribute($chainedDg2, '-multiplier', '1', '-name', 'Device Group 3');
$ixNet->commit();
$chainedDg2 = ($ixNet->remapIds($chainedDg2))[0];
my $loopback2 = $ixNet->add($chainedDg2, 'ipv6Loopback');
$ixNet->setMultiAttribute($loopback2, '-stackedLayers', '', '-name', 'IPv6 Loopback 1');
$ixNet->commit();
my $addressSet2 = $ixNet->getAttribute($loopback2, '-address');
$ixNet->setMultiAttribute($addressSet2, '-clearOverlays', 'false', '-pattern', 'counter');
$ixNet->commit();
$addressSet2 = $ixNet->add($addressSet2, 'counter');
$ixNet->setMultiAttribute($addressSet2, '-step', '0:0:0:0:0:0:0:1',
'-start', '3000:1:1:1:0:0:0:0', '-direction', 'increment');
$ixNet->commit();
$addressSet2 = ($ixNet->remapIds($addressSet2))[0];
################################################################################
# Start BGP+ protocol and wait for 45 seconds #
################################################################################
print("Starting protocols and waiting for 45 seconds for protocols to come up\n");
$ixNet->execute('startAllProtocols');
sleep(45);
################################################################################
# Retrieve protocol statistics #
################################################################################
print("Fetching all Protocol Summary Stats\n");
my $viewPage = '::ixNet::OBJ-/statistics/view:"Protocols Summary"/page';
my @statcap = $ixNet->getAttribute($viewPage, '-columnCaptions');
my @rowvals = $ixNet->getAttribute($viewPage, '-rowValues');
my $index = 0;
my $statValueList= '';
foreach $statValueList (@rowvals) {
print("***************************************************\n");
my $statVal = '';
foreach $statVal (@$statValueList) {
my $statIndiv = '';
$index = 0;
foreach $statIndiv (@$statVal) {
printf(" %-30s:%s\n", $statcap[$index], $statIndiv);
$index++;
}
}
}
print("***************************************************\n");
################################################################################
# Enable the BGP IPv6 Learned Information Filter #
# And apply changes On The Fly #
################################################################################
print("Enabling IPv6 Unicast Learned Information for BGP+ Router\n");
$ixNet->setAttribute($ixNet->getAttribute($bgp1, '-filterIpV6Unicast').'/singleValue', '-value', 'true');
$ixNet->setAttribute($ixNet->getAttribute($bgp2, '-filterIpV6Unicast').'/singleValue', '-value', 'true');
$ixNet->commit();
my $globals = ($ixNet->getRoot()).'/globals';
my $topology = $globals.'/topology';
print("Applying changes on the fly\n");
$ixNet->execute('applyOnTheFly', $topology);
sleep(10);
###############################################################################
# Retrieve protocol learned info #
###############################################################################
print("Fetching BGP IPv6 Learned Info\n");
$ixNet->execute('getIPv6LearnedInfo', $bgp1, '1');
sleep(5);
my $linfo = ($ixNet->getList($bgp1, 'learnedInfo'))[0];
my @values = $ixNet->getAttribute($linfo, '-values');
my $v = '';
print("***************************************************\n");
foreach $v (@values) {
my $w = '0';
foreach $w (@$v) {
printf("%10s", $w);
}
print("\n");
}
print("***************************************************\n");
################################################################################
# Configure L2-L3 traffic #
################################################################################
print ("Congfiguring L2-L3 Traffic Item\n");
my $trafficItem1 = $ixNet->add(($ixNet->getRoot()).'/traffic', 'trafficItem');
$ixNet->setMultiAttribute($trafficItem1, '-name', 'Traffic Item 1',
'-roundRobinPacketOrdering', 'false',
'-trafficType', 'ipv6');
$ixNet->commit();
$trafficItem1 = ($ixNet->remapIds($trafficItem1))[0];
my $endpointSet1 = $ixNet->add($trafficItem1, 'endpointSet');
my @source = ($networkGroup1.'/ipv6PrefixPools:1');
my @destination = ($networkGroup2.'/ipv6PrefixPools:1');
$ixNet->setMultiAttribute($endpointSet1,
'-name', 'EndpointSet-1',
'-multicastDestinations', (''),
'-scalableSources', (''),
'-multicastReceivers', (''),
'-scalableDestinations', (''),
'-ngpfFilters', (''),
'-trafficGroups', (''),
'-sources', @source,
'-destinations', @destination);
$ixNet->commit();
$ixNet->setMultiAttribute($trafficItem1.'/tracking',
'-trackBy', ['sourceDestEndpointPair0','trackingenabled0'],
'-fieldWidth', 'thirtyTwoBits',
'-protocolOffset', 'Root.0',
'-values', (''));
$ixNet->commit();
################################################################################
# Configure Application traffic #
################################################################################
print ("Configuring Applib traffic\n");
my $trafficItem2 = $ixNet->add(($ixNet->getRoot()).'/traffic', 'trafficItem');
$ixNet->setMultiAttribute($trafficItem2, '-name', 'Traffic Item 2',
'-trafficItemType', 'applicationLibrary',
'-roundRobinPacketOrdering', 'false',
'-trafficType', 'ipv6ApplicationTraffic');
$ixNet->commit();
$trafficItem2 = ($ixNet->remapIds($trafficItem2))[0];
my $endpointSet2 = $ixNet->add($trafficItem2, 'endpointSet');
my @source_app = (($ixNet->getList($t1dev1, 'networkGroup'))[0]);
my @destin_app = (($ixNet->getList($t2dev1, 'networkGroup'))[0]);
$ixNet->setMultiAttribute($endpointSet2,
'-name', 'EndpointSet-2',
'-multicastDestinations', (''),
'-scalableSources', (''),
'-multicastReceivers', (''),
'-scalableDestinations', (''),
'-ngpfFilters', (''),
'-trafficGroups', (''),
'-sources', @source_app,
'-destinations', @destin_app);
$ixNet->commit();
$endpointSet2 = ($ixNet->remapIds($endpointSet2))[0];
my $appLibProfile = $ixNet->add($trafficItem2, 'appLibProfile');
my $flows_configured = ['Bandwidth_BitTorrent_File_Download',
'Bandwidth_eDonkey',
'Bandwidth_HTTP',
'Bandwidth_IMAPv4',
'Bandwidth_POP3',
'Bandwidth_Radius',
'Bandwidth_Raw',
'Bandwidth_Telnet',
'Bandwidth_uTorrent_DHT_File_Download',
'BBC_iPlayer',
'BBC_iPlayer_Radio',
'BGP_IGP_Open_Advertise_Routes',
'BGP_IGP_Withdraw_Routes',
'Bing_Search',
'BitTorrent_Ares_v217_File_Download',
'BitTorrent_BitComet_v126_File_Download',
'BitTorrent_Blizzard_File_Download',
'BitTorrent_Cisco_EMIX',
'BitTorrent_Enterprise',
'BitTorrent_File_Download',
'BitTorrent_LimeWire_v5516_File_Download',
                        'BitTorrent_RMIX_5M'];
$ixNet->setMultiAttribute($appLibProfile,
'-enablePerIPStats', 'false',
'-objectiveDistribution', 'applyFullObjectiveToEachPort',
'-configuredFlows', $flows_configured);
$ixNet->commit();
$appLibProfile = ($ixNet->remapIds($appLibProfile))[0];
# puts "ixNet help [ixNet getRoot]/traffic"
# puts "[ixNet help [ixNet getRoot]/traffic]"
###############################################################################
# Apply and start L2/L3 traffic #
###############################################################################
print("applying L2/L3 traffic\n");
$ixNet->execute('apply', ($ixNet->getRoot()).'/traffic');
sleep(5);
print("starting L2/L3 traffic\n");
$ixNet->execute('start', ($ixNet->getRoot()).'/traffic');
###############################################################################
# Apply and start applib traffic #
###############################################################################
print("applying applib traffic\n");
$ixNet->execute('applyStatefulTraffic', $ixNet->getRoot().'/traffic');
sleep(5);
print("starting applib traffic\n");
$ixNet->execute('startStatefulTraffic', $ixNet->getRoot().'/traffic');
print("Let traffic run for 1 minute\n");
sleep(60);
###############################################################################
# Retrieve Applib traffic item statistics #
###############################################################################
print("Verifying all the applib traffic stats\n");
$viewPage = '::ixNet::OBJ-/statistics/view:"Application Traffic Item Statistics"/page';
@statcap = $ixNet->getAttribute($viewPage, '-columnCaptions');
@rowvals = $ixNet->getAttribute($viewPage, '-rowValues');
$index = 0;
$statValueList= '';
foreach $statValueList (@rowvals) {
print("***************************************************\n");
my $statVal = '';
foreach $statVal (@$statValueList) {
my $statIndiv = '';
$index = 0;
foreach $statIndiv (@$statVal) {
printf(" %-30s:%s\n", $statcap[$index], $statIndiv);
$index++;
}
}
}
print("***************************************************\n");
###############################################################################
# Retrieve L2/L3 traffic item statistics #
###############################################################################
print("Verifying all the L2-L3 traffic stats\n");
$viewPage = '::ixNet::OBJ-/statistics/view:"Flow Statistics"/page';
@statcap = $ixNet->getAttribute($viewPage, '-columnCaptions');
@rowvals = $ixNet->getAttribute($viewPage, '-rowValues');
$index = 0;
$statValueList= '';
foreach $statValueList (@rowvals) {
print("***************************************************\n");
my $statVal = '';
foreach $statVal (@$statValueList) {
my $statIndiv = '';
$index = 0;
foreach $statIndiv (@$statVal) {
printf(" %-30s:%s\n", $statcap[$index], $statIndiv);
$index++;
}
}
}
print("***************************************************\n");
#################################################################################
# Stop applib traffic #
#################################################################################
print("Stopping applib traffic\n");
$ixNet->execute('stopStatefulTraffic', ($ixNet->getRoot()).'/traffic');
sleep(15);
#################################################################################
# Stop L2/L3 traffic #
#################################################################################
print("Stopping L2/L3 traffic\n");
$ixNet->execute('stop', ($ixNet->getRoot()).'/traffic');
sleep(5);
################################################################################
# Stop all protocols #
################################################################################
$ixNet->execute('stopAllProtocols');
print("!!! Test Script Ends !!!"); | 47.246753 | 107 | 0.487905 |
ede06c1c487bd0af06b1eff2ee10cf49c607b5cb | 1,971 | pl | Perl | data_format/process_exonerate_gff.pl | hyphaltip/genome-scripts | 65949403a34f019d5785bfb29bee6456c7e6f7e0 | [
"Artistic-2.0"
] | 46 | 2015-06-11T14:16:35.000Z | 2022-02-22T04:57:15.000Z | data_format/process_exonerate_gff.pl | harish0201/genome-scripts | 68234e24c463e22006b49267a76d87c774945b5c | [
"Artistic-2.0"
] | 1 | 2018-08-07T04:02:16.000Z | 2018-08-21T03:40:01.000Z | data_format/process_exonerate_gff.pl | harish0201/genome-scripts | 68234e24c463e22006b49267a76d87c774945b5c | [
"Artistic-2.0"
] | 47 | 2015-09-22T13:59:15.000Z | 2022-03-25T02:13:52.000Z | #!/usr/bin/perl -w
use strict;
use IO::String;
use Bio::Tools::GFF;
use Bio::DB::Fasta;
use Env;
use File::Spec;
my $out = Bio::Tools::GFF->new(-gff_version => 3);
my $db = Bio::DB::Fasta->new(File::Spec->catfile($HOME,qw(fungi
fungal_genomes
nt)));
my $state = 0;
my $buffer = '';
while(<>) {
if( $state == 1 ) {
if( /^\# --- END OF GFF DUMP ---/) {
my $in = Bio::Tools::GFF->new(-gff_version => 2,
-fh =>
IO::String->new($buffer));
my $gene;
my $length;
while( my $f = $in->next_feature ) {
my $srctag = $f->source_tag;
$srctag =~ s/\:/_/g;
$f->source_tag($srctag);
if( $f->primary_tag eq 'gene' ) {
$length ||= $db->length($f->seq_id);
($gene) = $f->get_tag_values('sequence');
$f->remove_tag('gene_orientation') if $f->has_tag('gene_orientation');
$f->add_tag_value('Id',$gene);
if( $f->strand < 0 ) {
# we're flip-flopping start/end
my $s = $length - $f->end;
my $e = $length - $f->start;
$f->start($s);
$f->end ($e);
}
$out->write_feature($f);
next;
} elsif( $f->primary_tag eq 'similarity') {
# make sub pieces
next;
#$f->add_tag_value('Target', $gene);
} elsif( $f->primary_tag eq 'splice5' ) {
$f->primary_tag('splice_donor');
} elsif( $f->primary_tag eq 'splice3' ) {
$f->primary_tag('splice_acceptor');
} elsif( $f->primary_tag eq 'exon' ) {
$f->primary_tag('CDS');
}
if( $f->strand < 0 ) {
my $s = $length - $f->end;
my $e = $length - $f->start;
$f->start($s);
$f->end ($e);
}
$f->add_tag_value('Parent', $gene);
$out->write_feature($f);
}
$state = 0;
$buffer = '';
next;
} elsif(/^\#/) { next; }
$buffer .= join("\t",split(/\s+/,$_,9));
} elsif( /^\# --- END OF GFF DUMP ---/) {
$state = 0;
$buffer = '';
} elsif( /^\# --- START OF GFF DUMP ---/ ) {
$state = 1;
$buffer = '';
}
}
| 25.597403 | 76 | 0.49721 |
edbca1a73315fe75939d641a869638c2677cccee | 186 | pl | Perl | dovecot/config.info.pl | nawawi/webmin | f82872f2b70fd1860a9a681083658a3f697e3ecf | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1,863 | 2015-01-04T21:45:45.000Z | 2022-03-30T09:10:50.000Z | dovecot/config.info.pl | nawawi/webmin | f82872f2b70fd1860a9a681083658a3f697e3ecf | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1,233 | 2015-01-03T12:45:51.000Z | 2022-03-31T02:39:58.000Z | dovecot/config.info.pl | nawawi/webmin | f82872f2b70fd1860a9a681083658a3f697e3ecf | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 546 | 2015-01-05T13:07:28.000Z | 2022-03-25T21:47:51.000Z | dovecot=Lokalizacja Dovecot,0
dovecot_config=Pełna ścieżka do pliku konfiguracyjnego Dovecot,0
init_script=Nazwa skryptu init Dovecota,3,Nie istnieje
pid_file=Plik PID serwera Dovecot,0
| 37.2 | 64 | 0.860215 |
edd7d47cadfafc66572d279bb08b751f67b7e1ee | 2,824 | al | Perl | Apps/CZ/CoreLocalizationPack/app/Src/PageExtensions/VATPostingSetupCard.PageExt.al | bjarkihall/ALAppExtensions | d8243d27e0280dec6e079ab9f1e838f9768c208c | [
"MIT"
] | 1 | 2021-08-16T18:14:49.000Z | 2021-08-16T18:14:49.000Z | Apps/CZ/CoreLocalizationPack/app/Src/PageExtensions/VATPostingSetupCard.PageExt.al | bjarkihall/ALAppExtensions | d8243d27e0280dec6e079ab9f1e838f9768c208c | [
"MIT"
] | null | null | null | Apps/CZ/CoreLocalizationPack/app/Src/PageExtensions/VATPostingSetupCard.PageExt.al | bjarkihall/ALAppExtensions | d8243d27e0280dec6e079ab9f1e838f9768c208c | [
"MIT"
] | 1 | 2021-02-09T10:23:09.000Z | 2021-02-09T10:23:09.000Z | pageextension 11757 "VAT Posting Setup Card CZL" extends "VAT Posting Setup Card"
{
layout
{
addafter(Usage)
{
group(VATCtrlReportCZL)
{
Caption = 'VAT Control Report';
field("VAT Rate CZL"; Rec."VAT Rate CZL")
{
ApplicationArea = VAT;
                    ToolTip = 'Specifies the type of VAT rate - base, reduced or reduced 2.';
}
field("Ratio Coefficient CZL"; Rec."Ratio Coefficient CZL")
{
ApplicationArea = VAT;
                    ToolTip = 'Specifies the ratio coefficient.';
}
field("Corrections Bad Receivable CZL"; Rec."Corrections Bad Receivable CZL")
{
ApplicationArea = VAT;
ToolTip = 'Specifies the designation of the receivable for the purposes of VAT Control Report.';
}
field("Supplies Mode Code CZL"; Rec."Supplies Mode Code CZL")
{
ApplicationArea = VAT;
ToolTip = 'Specifies supplies mode code from VAT layer. The setting is used in the VAT Control Report.';
}
}
}
addafter("Sales VAT Unreal. Account")
{
field("Sales VAT Curr. Exch. Acc CZL"; Rec."Sales VAT Curr. Exch. Acc CZL")
{
ApplicationArea = VAT;
                ToolTip = 'Specifies the G/L account for clearing sales VAT due to the different exchange rate for VAT.';
}
}
addafter("Purch. VAT Unreal. Account")
{
field("Purch. VAT Curr. Exch. Acc CZL"; Rec."Purch. VAT Curr. Exch. Acc CZL")
{
ApplicationArea = VAT;
                ToolTip = 'Specifies the G/L account for clearing purchase VAT due to the different exchange rate for VAT.';
}
}
addlast(General)
{
field("Reverse Charge Check CZL"; Rec."Reverse Charge Check CZL")
{
ApplicationArea = VAT;
                ToolTip = 'Specifies if the reverse charge will be checked.';
}
}
addlast(Sales)
{
field("VIES Sales CZL"; Rec."VIES Sales CZL")
{
ApplicationArea = Basic, Suite;
ToolTip = 'Specifies the option to include this posting setup in sales VIES declarations.';
}
}
addlast(Purchases)
{
field("VIES Purchase CZL"; Rec."VIES Purchase CZL")
{
ApplicationArea = Basic, Suite;
ToolTip = 'Specifies the option to include this posting setup in the purchase VIES declarations.';
}
}
}
}
| 38.162162 | 124 | 0.501416 |
ede8505c2163f4771234692ee44d98348e7ec03f | 2,143 | t | Perl | t/regression/chart_errorbars03.t | robertholl/excel-writer-xlsx | f6624ba598401f57d5d67de0f5db3c76b10266d7 | [
"Artistic-1.0-Perl"
] | null | null | null | t/regression/chart_errorbars03.t | robertholl/excel-writer-xlsx | f6624ba598401f57d5d67de0f5db3c76b10266d7 | [
"Artistic-1.0-Perl"
] | null | null | null | t/regression/chart_errorbars03.t | robertholl/excel-writer-xlsx | f6624ba598401f57d5d67de0f5db3c76b10266d7 | [
"Artistic-1.0-Perl"
] | null | null | null | ###############################################################################
#
# Tests the output of Excel::Writer::XLSX against Excel generated files.
#
# reverse ('(c)'), December 2012, John McNamara, [email protected]
#
use lib 't/lib';
use TestFunctions qw(_compare_xlsx_files _is_deep_diff);
use strict;
use warnings;
use Test::More tests => 1;
###############################################################################
#
# Tests setup.
#
my $filename = 'chart_errorbars03.xlsx';
my $dir = 't/regression/';
my $got_filename = $dir . "ewx_$filename";
my $exp_filename = $dir . 'xlsx_files/' . $filename;
my $ignore_members = [];
my $ignore_elements = {};
###############################################################################
#
# Test the creation of an Excel::Writer::XLSX file with error bars.
#
use Excel::Writer::XLSX;
my $workbook = Excel::Writer::XLSX->new( $got_filename );
my $worksheet = $workbook->add_worksheet();
my $chart = $workbook->add_chart( type => 'line', embedded => 1 );
# For testing, copy the randomly generated axis ids in the target xlsx file.
$chart->{_axis_ids} = [ 52288896, 53605504 ];
my $data = [
[ 1, 2, 3, 4, 5 ],
[ 2, 4, 6, 8, 10 ],
[ 3, 6, 9, 12, 15 ],
];
$worksheet->write( 'A1', $data );
$chart->add_series(
categories => '=Sheet1!$A$1:$A$5',
values => '=Sheet1!$B$1:$B$5',
y_error_bars => {
type => 'standard_error',
line => { color => 'red', dash_type => 'round_dot' }
},
);
$chart->add_series(
categories => '=Sheet1!$A$1:$A$5',
values => '=Sheet1!$C$1:$C$5',
);
$worksheet->insert_chart( 'E9', $chart );
$workbook->close();
###############################################################################
#
# Compare the generated and existing Excel files.
#
my ( $got, $expected, $caption ) = _compare_xlsx_files(
$got_filename,
$exp_filename,
$ignore_members,
$ignore_elements,
);
_is_deep_diff( $got, $expected, $caption );
###############################################################################
#
# Cleanup.
#
unlink $got_filename;
__END__
| 22.092784 | 79 | 0.4993 |
ed59b6b9b999e0ff39ae5009ceadd65af18ed945 | 2,726 | pl | Perl | examples/chat/flood.pl | jmico/beekeeper | eca0949e892df8f6e23a2fc8a69f89545ea1b7b4 | [
"Artistic-1.0"
] | 9 | 2020-05-14T22:19:06.000Z | 2021-12-22T23:37:26.000Z | examples/chat/flood.pl | jmico/beekeeper | eca0949e892df8f6e23a2fc8a69f89545ea1b7b4 | [
"Artistic-1.0"
] | 2 | 2021-04-28T05:54:43.000Z | 2021-12-23T03:07:09.000Z | examples/chat/flood.pl | jmico/beekeeper | eca0949e892df8f6e23a2fc8a69f89545ea1b7b4 | [
"Artistic-1.0"
] | 2 | 2021-08-11T10:55:35.000Z | 2022-01-21T00:26:55.000Z | #!/usr/bin/perl -wT
use strict;
use warnings;
BEGIN { unshift @INC, ($ENV{'PERL5LIB'} =~ m/([^:]+)/g); }
use MyApp::Bot;
use Time::HiRes 'time';
use Term::ReadKey;
use Getopt::Long;
ReadMode "cbreak";
END { ReadMode "restore" }
my ($opt_clients, $opt_rate, $opt_size, $opt_help);
my $no_args = (@ARGV == 0) ? 1 : 0;
GetOptions(
"clients=i" => \$opt_clients, # --clients
"rate=i" => \$opt_rate, # --rate
"size=i" => \$opt_size, # --size
"help" => \$opt_help, # --help
) or exit;
my $Help = "
Usage: flood [OPTIONS]
Create a lot of clients who flood with messages each other.
-c, --clients N number of client connections to use
-r, --rate N sustain a rate of N requests per second among all clients
-s, --size N size in KiB of requests, default is 0
-h, --help display this help and exit
To create a 100 clients sending in total 500 messages per second to each other:
flood -c 100 -r 500
";
if ($opt_help || $no_args) {
print $Help;
exit;
}
my $num_bots = $opt_clients || 50;
my $msg_sec = $opt_rate || 100;
my $msg_size = $opt_size || 0;
my @Bots;
my $DEBUG = 0;
foreach my $n (1..$num_bots) {
my $username = sprintf("bot-%.3d", $n);
push @Bots, MyApp::Bot->new(
username => $username,
on_message => sub {
my (%args) = @_;
return unless $DEBUG;
my $message = $args{'message'};
my $from = $args{'from'} ? "$args{from}:" : ">";
print "$from $message\n";
},
);
}
print "$num_bots clients are sending $msg_sec requests per second.\n";
print "Workers are handling $msg_sec calls per second.\n";
print "Routers are handling " . ($msg_sec * 2) . " messages per second.\n";
print "(press any key to stop)\n";
$| = 1; # autoflush progress dots
while (1) {
print '.';
my $start_on = time();
for (1..$msg_sec) {
my $bot_A = $Bots[rand($num_bots)];
my $bot_B = $Bots[rand($num_bots)];
my $msg = $msg_size ? 'X' x ($msg_size * 1024) : 'Hello ' . $bot_B->username;
$bot_A->talk(
to_user => $bot_B->username,
message => $msg,
);
}
my $cv = AnyEvent->condvar;
AnyEvent::postpone { $cv->send };
$cv->recv;
my $key = ReadKey(-1);
if ($key) {
print "\n";
last;
}
my $took = time() - $start_on;
if ($took > 1) {
my $ovl = int(abs(($took - 1) * 100));
print "Cannot sustain $msg_sec msg/s ($ovl\% overload)\n";
next;
}
my $wait = 1 - $took;
$cv = AnyEvent->condvar;
AnyEvent->now_update;
my $tmr = AnyEvent->timer( after => $wait, cb => $cv);
$cv->recv;
}
1;
| 22.907563 | 85 | 0.54292 |
eda31009536842ad1068c30d632eebce325895ec | 6,329 | al | Perl | test/3_Pages/MyWizard.al | srenders/AL-Demos | aa28f3886d9da005a35cc6272ef80744e7d4670f | [
"MIT"
] | 9 | 2019-01-18T13:00:37.000Z | 2021-03-26T02:47:21.000Z | test/3_Pages/MyWizard.al | srenders/AL-Demos | aa28f3886d9da005a35cc6272ef80744e7d4670f | [
"MIT"
] | null | null | null | test/3_Pages/MyWizard.al | srenders/AL-Demos | aa28f3886d9da005a35cc6272ef80744e7d4670f | [
"MIT"
] | 11 | 2018-06-04T21:33:41.000Z | 2021-11-23T00:30:07.000Z | page 50100 "My Wizard"
{
CaptionML=ENU='My Page Wizard';
PageType = NavigatePage;
SourceTable = "Company Information";
SourceTableTemporary=true;
layout
{
area(content)
{
group(Group96)
{
CaptionML=ENU='';
Editable=false;
Visible=TopBannerVisible AND NOT Step3Visible;
field(MediaResourcesStandard;MediaResourcesStandard."Media Reference")
{
ApplicationArea=Basic,Suite,Invoicing;
Editable=false;
ShowCaption=false;
}
}
group(Group98)
{
CaptionML=ENU='';
Editable=false;
Visible=TopBannerVisible AND Step3Visible;
field(MediaResourcesDone;MediaResourcesDone."Media Reference")
{
ApplicationArea=Basic,Suite,Invoicing;
Editable=false;
ShowCaption=false;
}
}
group(Step1)
{
Visible=Step1Visible;
group("Welcome to PageName")
{
CaptionML=ENU='Welcome to PageName Setup';
Visible=Step1Visible;
group(Group18)
{
CaptionML=ENU='';
InstructionalTextML=ENU='Step1 - Replace this text with some instructions.';
}
}
group("Let's go!")
{
CaptionML=ENU='Let''s go!';
group(Group22)
{
CaptionML=ENU='';
InstructionalTextML=ENU='Step1 - Replace this text with some more instructions.';
}
}
}
group(Step2)
{
CaptionML=ENU='';
InstructionalTextML=ENU='Step2 - Replace this text with some instructions.';
Visible=Step2Visible;
//You might want to add fields here
field(Name;Name)
{
}
}
group(Step3)
{
Visible=Step3Visible;
group(Group23)
{
CaptionML=ENU='';
InstructionalTextML=ENU='Step3 - Replace this text with some instructions.';
}
group("That's it!")
{
CaptionML=ENU='That''s it!';
group(Group25)
{
CaptionML=ENU='';
InstructionalTextML=ENU='To save this setup, choose Finish.';
}
}
}
}
}
actions
{
area(processing)
{
action(ActionBack)
{
ApplicationArea=All;
CaptionML=ENU='Back';
Enabled=BackActionEnabled;
Image=PreviousRecord;
InFooterBar=true;
trigger OnAction();
begin
NextStep(TRUE);
end;
}
action(ActionNext)
{
ApplicationArea=All;
CaptionML=ENU='Next';
Enabled=NextActionEnabled;
Image=NextRecord;
InFooterBar=true;
trigger OnAction();
begin
NextStep(FALSE);
end;
}
action(ActionFinish)
{
ApplicationArea=Basic,Suite,Invoicing;
CaptionML=ENU='Finish';
Enabled=FinishActionEnabled;
Image=Approve;
InFooterBar=true;
trigger OnAction();
begin
FinishAction;
end;
}
}
}
trigger OnInit();
begin
LoadTopBanners;
end;
trigger OnOpenPage();
var
RecordVar : Record "Company Information";
begin
INIT;
IF RecordVar.GET THEN BEGIN
TRANSFERFIELDS(RecordVar);
END;
INSERT;
Step := Step::Start;
EnableControls;
end;
var
MediaRepositoryStandard : Record 9400;
MediaRepositoryDone : Record 9400;
MediaResourcesStandard : Record 2000000182;
MediaResourcesDone : Record 2000000182;
Step : Option Start,Step2,Finish;
TopBannerVisible : Boolean;
Step1Visible : Boolean;
Step2Visible : Boolean;
Step3Visible : Boolean;
FinishActionEnabled : Boolean;
BackActionEnabled : Boolean;
NextActionEnabled : Boolean;
local procedure EnableControls();
begin
ResetControls;
CASE Step OF
Step::Start:
ShowStep1;
Step::Step2:
ShowStep2;
Step::Finish:
ShowStep3;
END;
end;
local procedure StoreRecordVar();
var
RecordVar : Record "Company Information";
begin
IF NOT RecordVar.GET THEN BEGIN
RecordVar.INIT;
RecordVar.INSERT;
END;
RecordVar.TRANSFERFIELDS(Rec,FALSE);
RecordVar.MODIFY(TRUE);
COMMIT;
end;
local procedure FinishAction();
begin
StoreRecordVar;
CurrPage.CLOSE;
end;
local procedure NextStep(Backwards : Boolean);
begin
IF Backwards THEN
Step := Step - 1
ELSE
Step := Step + 1;
EnableControls;
end;
local procedure ShowStep1();
begin
Step1Visible := TRUE;
FinishActionEnabled := FALSE;
BackActionEnabled := FALSE;
end;
local procedure ShowStep2();
begin
Step2Visible := TRUE;
end;
local procedure ShowStep3();
begin
Step3Visible := TRUE;
NextActionEnabled := FALSE;
FinishActionEnabled := TRUE;
end;
local procedure ResetControls();
begin
FinishActionEnabled := FALSE;
BackActionEnabled := TRUE;
NextActionEnabled := TRUE;
Step1Visible := FALSE;
Step2Visible := FALSE;
Step3Visible := FALSE;
end;
local procedure LoadTopBanners();
begin
IF MediaRepositoryStandard.GET('AssistedSetup-NoText-400px.png',FORMAT(CURRENTCLIENTTYPE)) AND
MediaRepositoryDone.GET('AssistedSetupDone-NoText-400px.png',FORMAT(CURRENTCLIENTTYPE))
THEN
IF MediaResourcesStandard.GET(MediaRepositoryStandard."Media Resources Ref") AND
MediaResourcesDone.GET(MediaRepositoryDone."Media Resources Ref")
THEN
TopBannerVisible := MediaResourcesDone."Media Reference".HASVALUE;
end;
} | 25.01581 | 105 | 0.535314 |
edc32c7237c01c11335c83f0d0801758320447c4 | 7,701 | pm | Perl | modules/Bio/EnsEMBL/Compara/PipeConfig/Example/TuataraProteinTrees_conf.pm | MatBarba/ensembl-compara | e7b0ac16adca6849934b15bc37e58603be3690ff | [
"Apache-2.0"
] | null | null | null | modules/Bio/EnsEMBL/Compara/PipeConfig/Example/TuataraProteinTrees_conf.pm | MatBarba/ensembl-compara | e7b0ac16adca6849934b15bc37e58603be3690ff | [
"Apache-2.0"
] | null | null | null | modules/Bio/EnsEMBL/Compara/PipeConfig/Example/TuataraProteinTrees_conf.pm | MatBarba/ensembl-compara | e7b0ac16adca6849934b15bc37e58603be3690ff | [
"Apache-2.0"
] | null | null | null | =head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=head1 NAME
Bio::EnsEMBL::Compara::PipeConfig::Example::TuataraProteinTrees_conf
=head1 DESCRIPTION
Parameters to run the ProteinTrees pipeline on the tuatara dataset using
an all-vs-all blast clustering
=head1 CONTACT
Please contact Compara with questions/suggestions
=cut
package Bio::EnsEMBL::Compara::PipeConfig::Example::TuataraProteinTrees_conf;
use strict;
use warnings;
use base ('Bio::EnsEMBL::Compara::PipeConfig::Example::NoMasterProteinTrees_conf');
sub default_options {
my ($self) = @_;
return {
%{$self->SUPER::default_options}, # inherit the Ensembl ones
#Ensembl core databases:
'homo_sapiens' => {
-host => "ensdb-web-16",
-port => 5377,
-user => "ensro",
-db_version => 86,
-dbname => "homo_sapiens_core_86_37",
-species => "homo_sapiens"
},
'gallus_gallus' => {
-host => "ens-livemirror",
-port => 3306,
-user => "ensro",
-db_version => 85,
-dbname => "gallus_gallus_core_85_4",
-species => "gallus_gallus"
},
'meleagris_gallopavo' => {
-host => "ens-livemirror",
-port => 3306,
-user => "ensro",
-db_version => 85,
-dbname => "meleagris_gallopavo_core_85_21",
-species => "meleagris_gallopavo"
},
'anas_platyrhynchos' => {
-host => "ens-livemirror",
-port => 3306,
-user => "ensro",
-db_version => 85,
-dbname => "anas_platyrhynchos_core_85_1",
-species => "anas_platyrhynchos"
},
'taeniopygia_guttata' => {
-host => "ens-livemirror",
-port => 3306,
-user => "ensro",
-db_version => 85,
-dbname => "taeniopygia_guttata_core_85_1",
-species => "taeniopygia_guttata"
},
'ficedula_albicollis' => {
-host => "ens-livemirror",
-port => 3306,
-user => "ensro",
-db_version => 85,
-dbname => "ficedula_albicollis_core_85_1",
-species => "ficedula_albicollis"
},
'pelodiscus_sinensis' => {
-host => "ens-livemirror",
-port => 3306,
-user => "ensro",
-db_version => 85,
-dbname => "pelodiscus_sinensis_core_85_1",
-species => "pelodiscus_sinensis"
},
'anolis_carolinensis' => {
-host => "ens-livemirror",
-port => 3306,
-user => "ensro",
-db_version => 85,
-dbname => "anolis_carolinensis_core_85_2",
-species => "anolis_carolinensis"
},
'monodelphis_domestica' => {
-host => "ens-livemirror",
-port => 3306,
-user => "ensro",
-db_version => 85,
-dbname => "monodelphis_domestica_core_85_5",
-species => "monodelphis_domestica"
},
'ornithorhynchus_anatinus' => {
-host => "ens-livemirror",
-port => 3306,
-user => "ensro",
-db_version => 85,
-dbname => "ornithorhynchus_anatinus_core_85_1",
-species => "ornithorhynchus_anatinus"
},
'danio_rerio' => {
-host => "ens-livemirror",
-port => 3306,
-user => "ensro",
-db_version => 85,
-dbname => "danio_rerio_core_85_10",
-species => "danio_rerio"
},
'lepisosteus_oculatus' => {
-host => "ens-livemirror",
-port => 3306,
-user => "ensro",
-db_version => 85,
-dbname => "lepisosteus_oculatus_core_85_1",
-species => "lepisosteus_oculatus"
},
'mus_musculus' => {
-host => "ens-livemirror",
-port => 3306,
-user => "ensro",
-db_version => 85,
-dbname => "mus_musculus_core_85_38",
-species => "mus_musculus"
},
'takifugu_rubripes' => {
-host => "ens-livemirror",
-port => 3306,
-user => "ensro",
-db_version => 85,
-dbname => "xenopus_tropicalis_core_85_42",
-species => "xenopus_tropicalis"
},
        # if collection is set, both 'curr_core_dbs_locs' and 'curr_core_sources_locs' parameters are set to undef; otherwise they are set to use the default pairwise values
'curr_core_sources_locs' => [
$self->o('gallus_gallus'), $self->o('meleagris_gallopavo'),
$self->o('anas_platyrhynchos'), $self->o('taeniopygia_guttata'),
$self->o('ficedula_albicollis'), $self->o('pelodiscus_sinensis'),
$self->o('anolis_carolinensis'), $self->o('monodelphis_domestica'),
$self->o('homo_sapiens'), $self->o('ornithorhynchus_anatinus'),
$self->o('danio_rerio'), $self->o('lepisosteus_oculatus'),
$self->o('takifugu_rubripes'), $self->o('mus_musculus'),
],
# custom pipeline name, in case you don't like the default one
'pipeline_name' => 'Tuatara_ProteinTree_'.$self->o('rel_with_suffix'),
# Tag attached to every single tree
'division' => 'tuatara',
        #Since we are loading members from FASTA files, we don't have the dna_frags, so we need to allow it to be missing.
'allow_missing_coordinates' => 0,
#Compara server to be used
'host' => 'compara4',
# NOTE: The databases referenced in the following arrays have to be hashes (not URLs)
# Add the database entries for the current core databases and link 'curr_core_sources_locs' to them
        'curr_file_sources_locs' => [ '/homes/mateus/ENSEMBL/master/ensembl-compara/scripts/examples/tuatara_source.json' ], # It can be a list of JSON files defining an additional set of species
};
}
1;
| 38.123762 | 202 | 0.494351 |
ed31d943185428f4c0fc8ea8b716f5f13a3c7a99 | 4,538 | pm | Perl | modules/Bio/EnsEMBL/Variation/Pipeline/Remapping/FilterReadCoverageMapping.pm | fayerodgers/ensembl-variation | 566bbdcda60d91ad4c0afa00e13882bdbd81c14e | [
"Apache-2.0"
] | null | null | null | modules/Bio/EnsEMBL/Variation/Pipeline/Remapping/FilterReadCoverageMapping.pm | fayerodgers/ensembl-variation | 566bbdcda60d91ad4c0afa00e13882bdbd81c14e | [
"Apache-2.0"
] | null | null | null | modules/Bio/EnsEMBL/Variation/Pipeline/Remapping/FilterReadCoverageMapping.pm | fayerodgers/ensembl-variation | 566bbdcda60d91ad4c0afa00e13882bdbd81c14e | [
"Apache-2.0"
] | null | null | null | =head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2019] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
package Bio::EnsEMBL::Variation::Pipeline::Remapping::FilterReadCoverageMapping;
use strict;
use warnings;
use FileHandle;
use Bio::EnsEMBL::Registry;
use base ('Bio::EnsEMBL::Variation::Pipeline::Remapping::FilterMapping');
sub fetch_input {
my $self = shift;
}
sub run {
my $self = shift;
$self->report_failed_read_coverage_mappings();
$self->filter_read_coverage_mapping_results();
$self->join_read_coverage_data();
$self->SUPER::write_statistics();
}
sub write_output {
my $self = shift;
}
sub report_failed_read_coverage_mappings {
my $self = shift;
$self->SUPER::report_failed_read_mappings;
}
sub filter_read_coverage_mapping_results {
my $self = shift;
$self->SUPER::filter_read_mapping_results;
}
sub join_read_coverage_data {
my $self = shift;
my $file_init_feature = $self->param('file_init_feature');
my $fh_init_feature = FileHandle->new($file_init_feature, 'r');
my $read_coverage_data = {};
while (<$fh_init_feature>) {
chomp;
my $data = $self->read_data($_);
my $key = $data->{entry};
$read_coverage_data->{$key} = $data;
}
$fh_init_feature->close();
# get new seq_region_ids
my $seq_region_ids = {};
my $cdba = $self->param('cdba');
my $sa = $cdba->get_SliceAdaptor;
my $slices = $sa->fetch_all('toplevel', undef, 1);
foreach my $slice (@$slices) {
$seq_region_ids->{$slice->seq_region_name} = $slice->get_seq_region_id;
}
# new individual_id
my $individual_name_oldasm = $self->param('individual_name');
my $old_individual_id = $self->param('individual_id');
my $vdba = $self->param('vdba');
my $ia = $vdba->get_IndividualAdaptor;
my $individuals_newasm = $ia->fetch_all_by_name($individual_name_oldasm);
# my $individual_newasm = $individuals_newasm->[0];
# my $new_individual_id = $individual_newasm->dbID();
my $individual_newasm;
my $new_individual_id;
if ((scalar @$individuals_newasm) > 1) {
$individual_newasm = $ia->fetch_by_dbID($old_individual_id);
if ($individual_newasm->name eq $individual_name_oldasm) {
$new_individual_id = $old_individual_id;
} else {
die "More than one name for $individual_name_oldasm in new database";
}
}
$individual_newasm = $individuals_newasm->[0];
$new_individual_id = $individual_newasm->dbID();
# join feature data with mapping data:
my $file_load_features = $self->param('file_load_features');
my $fh_load_features = FileHandle->new($file_load_features, 'w');
my $file_filtered_mappings = $self->param('file_filtered_mappings');
my $fh_mappings = FileHandle->new($file_filtered_mappings, 'r');
my ($data, $variation_feature_id, $version, $variation_name);
while (<$fh_mappings>) {
chomp;
my ($entry, $seq_name, $start, $end, $strand, $score) = split("\t", $_);
my $seq_region_id = $seq_region_ids->{$seq_name};
$data = $read_coverage_data->{$entry};
if ($start > $end) {
$self->warning("Swap start end for $start $end");
($start, $end) = ($end, $start);
}
$data->{seq_region_id} = $seq_region_id;
$data->{seq_region_start} = $start;
$data->{seq_region_end} = $end;
$data->{individual_id} = $new_individual_id;
my @output = ();
foreach my $column_name (sort keys %$data) {
unless ($column_name =~ /^individual_name$/ || $column_name =~ /^seq_region_name$/ || $column_name =~ /^entry$/) {
push @output, $data->{$column_name};
}
}
my $line = join("\t", @output);
print $fh_load_features $line, "\n";
}
$fh_mappings->close();
$fh_load_features->close();
}
1;
| 31.734266 | 120 | 0.697223 |
edd862b1edc9fde959879bf39351f23d4f469478 | 5,282 | pl | Perl | prolog/configs.pl | hpistor/chupacabra | d5d91a5398ec6a1174b6cf6f0087aca24605dd63 | [
"Apache-2.0"
] | null | null | null | prolog/configs.pl | hpistor/chupacabra | d5d91a5398ec6a1174b6cf6f0087aca24605dd63 | [
"Apache-2.0"
] | 1 | 2021-08-23T20:43:04.000Z | 2021-08-23T20:43:04.000Z | prolog/configs.pl | hpistor/chupacabra | d5d91a5398ec6a1174b6cf6f0087aca24605dd63 | [
"Apache-2.0"
] | null | null | null | /*
[([Configs1], [Vulns1]), ([Configs2], [Vulns2]), ..., ([ConfigsN], [VulnsN])]
*/successivelyMergeConfigs(StartingConfig, [(Config, _)|T], FinalMerged) :-
checkConfigs(StartingConfig, Config, Merged), !,
successivelyMergeConfigs(Merged, T, FinalMerged).
successivelyMergeConfigs(Config, [], Config).
% assign a new config to each path
updateConfigs(_, [], []).
updateConfigs(Config, [(_, Vulns)|Rest], [(Config, Vulns)|Rest2]) :-
updateConfigs(Config, Rest, Rest2).
groupPathsByConfigsStep([], []).
groupPathsByConfigsStep([Paths|RestPaths], [UpdatedPaths|RestMerged]) :-
Paths=[(Config, _)|_],
select(MatchingPaths, RestPaths, UncheckedPaths),
append(Paths, MatchingPaths, TestingPaths),
successivelyMergeConfigs(Config, TestingPaths, Merged),
updateConfigs(Merged, TestingPaths, UpdatedPaths),
groupPathsByConfigsStep(UncheckedPaths, RestMerged).
groupPathsByConfigsStep([Paths|RestPaths], [Paths|RestMerged]) :-
groupPathsByConfigsStep(RestPaths, RestMerged).
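% Repeat the grouping step until no further merging occurs (fixpoint).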
groupPathsByConfigs(Paths, Result) :-
groupPathsByConfigsStep(Paths, NewPaths),
dif(Paths, NewPaths), !,
groupPathsByConfigs(NewPaths, Result).
groupPathsByConfigs(Paths, Paths).
%checkConfigs(AcceptedConfigs, PendingConfigs, NewConfigs)
checkConfigs([], PendingConfigs, PendingConfigs).
checkConfigs([H|T], [], [H|T]).
checkConfigs(AcceptedConfigs, PendingConfigs, SortedConfigs) :-
select(K-PendingVals, PendingConfigs, RestPendingConfigs),
\+ member(K-_, AcceptedConfigs),
checkConfigs(AcceptedConfigs, RestPendingConfigs, TmpConfigs),
NewConfigs=[K-PendingVals|TmpConfigs],
sort(NewConfigs, SortedConfigs).
checkConfigs(AcceptedConfigs, PendingConfigs, SortedConfigs) :-
select(K-PendingVals, PendingConfigs, RestPendingConfigs),
select(K-AcceptedVals, AcceptedConfigs, RestAcceptedConfigs),
checkConfigs(RestAcceptedConfigs, RestPendingConfigs, TmpConfigs),
mergeConfigs(AcceptedVals, PendingVals, MergedConfigs),
NewConfigs=[K-MergedConfigs|TmpConfigs],
sort(NewConfigs, SortedConfigs).
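% Quantifier merge rules: the clause name is the prior quantifier, the second
% argument is the incoming quantifier, and the last argument is the merged result.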
only(PriorVals, _, ThisVals, _, _, only) :-
union(PriorVals, ThisVals, AllVals),
length(AllVals, L),
L=<1.
exists(_, exists, _, _, _, exists).
exists(PriorVals, only, ThisVals, _, _, only) :-
union(PriorVals, ThisVals, PriorVals).
mergeConfig(Key, (Pred, PriorVals), (ThisPred, ThisVals), Config, Result) :-
Check=..[Pred, PriorVals, ThisPred, ThisVals, Key, Config, NewPred],
call(Check),
union(PriorVals, ThisVals, AllVals),
sort(AllVals, SortedVals),
Result=(NewPred, SortedVals).
mergeConfigs([], ThisConfig, ThisConfig).
mergeConfigs(PriorConfig, [], PriorConfig).
mergeConfigs(PriorConfig, ThisConfig, SortedConfig) :-
select(K-ThisVals, ThisConfig, RestThisConfig),
\+ member(K-_, PriorConfig),
mergeConfigs(PriorConfig, RestThisConfig, TmpConfig),
NewConfig=[K-ThisVals|TmpConfig],
sort(NewConfig, SortedConfig).
mergeConfigs(PriorConfig, ThisConfig, SortedConfig) :-
select(K-ThisVals, ThisConfig, RestThisConfig),
select(K-PriorVals, PriorConfig, RestPriorConfig),
mergeConfigs(RestPriorConfig, RestThisConfig, TmpConfig),
mergeConfig(K, PriorVals, ThisVals, PriorConfig, NewVals),
NewConfig=[K-NewVals|TmpConfig],
sort(NewConfig, SortedConfig).
realizeConfigFromParams([], _, []).
realizeConfigFromParams([Key-Vals|ConfigRest], Params, [Key-ValsRealized|ConfigRealizedRest]) :-
realizeKeysValsFromParams(Vals, Params, ValsRealized),
realizeConfigFromParams(ConfigRest, Params, ConfigRealizedRest).
realizeKeysValsFromParams([], _, []).
realizeKeysValsFromParams([Key-(Quantifier, Vals)|KeysValsRest], Params, [Key-(Quantifier, ValsRealized)|KeysValsRealizedRest]) :-
realizeValsFromParams(Vals, Params, ValsRealized),
realizeKeysValsFromParams(KeysValsRest, Params, KeysValsRealizedRest).
realizeValsFromParams([], _, []).
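% Each value is either a literal string or the name of a generator predicate,
% which is called with an assoc built from Params.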
realizeValsFromParams([Val|ValsRest], Params, [ValRealized|ValsRealizedRest]) :-
( string(Val)
-> ValRealized=Val
; list_to_assoc(Params, Assoc),
call(Val, Assoc, ValRealized)
),
realizeValsFromParams(ValsRest, Params, ValsRealizedRest).
% generates an atom from a list of atoms, at random
generateFromList(List, Length, Output) :-
length(Output, Length),
length(List, N1),
maplist(random_char_generate(List, N1), Output).
random_char_generate(List, N, Char) :-
random(0, N, X),
nth0(X, List, Char).
% generates a username from username list, defined below
% (params not used)
generateUsername(_, Username) :-
usernames(Usernames),
generateFromList(Usernames, 1, Output),
nth0(0, Output, UsernameAtom),
atom_string(UsernameAtom, Username).
% generates a password, pulling letters from a dictionary
generatePassword(Params, Password) :-
get_assoc(paramPasswordLength, Params, Length),
passwords(Passwords),
generateFromList(Passwords, Length, Output),
string_chars(Password, Output).
passwords([a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z, 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0']).
usernames([admin, jane, john, guest]).
| 41.590551 | 270 | 0.712609 |
eda67fa9bdb35fdb99acd1d617a7765b6e0bbb6f | 2,211 | pl | Perl | 5-demultiplex_blast.pl | EnzymeFunctionInitiative/est-precompute-bw | 7d58c0186eed1e88ab9f44d708cbb827eae5cfd8 | [
"Apache-2.0"
] | null | null | null | 5-demultiplex_blast.pl | EnzymeFunctionInitiative/est-precompute-bw | 7d58c0186eed1e88ab9f44d708cbb827eae5cfd8 | [
"Apache-2.0"
] | null | null | null | 5-demultiplex_blast.pl | EnzymeFunctionInitiative/est-precompute-bw | 7d58c0186eed1e88ab9f44d708cbb827eae5cfd8 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env perl
#program to re-add sequences removed by initial cdhit
#version 0.9.3 Program created
use Getopt::Long;
use File::Basename;
use warnings;
my $dir = shift @ARGV;
my $unit = basename($dir);
my $cluster = "$dir/fasta/$unit.cdhit.fa.clstr";
my $blastin = "$dir/1out/1.out";
my $blastout = "$dir/1out/2.out";
if(-s $blastout){
die "$blastout already exists\n";
}
#$result=GetOptions ("cluster=s" => \$cluster,
# "blastin=s" => \$blastin,
# "blastout=s" => \$blastout);
#parse cluster file to get parent/child sequence associations
#create new 1.out file from input 1.out that populates associations
open CLUSTER, $cluster or die "cannot open cdhit cluster file $cluster\n";
open BLASTIN, $blastin or die "cannot open blast input file $blastin\n";
open BLASTOUT, ">$blastout.tmp" or die "cannnot write to blast output file $blastout\n";
%tree=();
#parse the clstr file
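# cdhit .clstr format: ">Cluster N" starts a cluster, the representative
# sequence line ends in "... *", member lines end in "... at <identity>"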
print "Read in clusters\n";
while(<CLUSTER>){
my $line=$_;
chomp $line;
if($line=~/^>/){
#print "New Cluster\n";
if(defined $head){
@{$tree{$head}}=@children;
}
@children=();
}elsif($line=~/ >(.+)\.\.\. \*$/){
#print "head\t$1\n";
push @children, $1;
$head=$1;
print "$1\n";
}elsif($line=~/^\d+.*>(.+)\.\.\. at/){
#print "child\t$1\n";
push @children, $1;
}else{
die "died at $line\n";
}
}
@{$tree{$head}}=@children;
print "Demultiplex blast\n";
#read BLASTIN and expand with clusters from cluster file to create demultiplexed file
while(<BLASTIN>){
my $line=$_;
chomp $line;
my @lineary=split /\s+/, $line;
$linesource=shift @lineary;
$linetarget=shift @lineary;
print "$linesource\t$linetarget\n";
if($linesource eq $linetarget){
for(my $i=0;$i<scalar @{$tree{$linesource}};$i++){
for(my $j=$i+1;$j<scalar @{$tree{$linesource}};$j++){
print BLASTOUT "@{$tree{$linesource}}[$i]\t@{$tree{$linesource}}[$j]\t".join("\t", @lineary)."\n";
print "likewise demux\t@{$tree{$linesource}}[$i]\t@{$tree{$linesource}}[$j]\n";
}
}
}else{
foreach my $source (@{$tree{$linesource}}){
foreach my $target (@{$tree{$linetarget}}){
print BLASTOUT "$source\t$target\t".join("\t", @lineary)."\n";
}
}
}
}
system("mv $blastout.tmp $blastout");
| 26.011765 | 103 | 0.62777 |
edbffc3ec2aa8a290b2cdbc8ddcc55f2bca064db | 2,944 | pl | Perl | misc/changelog.pl | TaylorCFrey/trafficcontrol | 308484ddfb37952901d8b138087d4b8f0c7b007d | [
"Apache-2.0",
"BSD-2-Clause",
"MIT-0",
"MIT",
"BSD-3-Clause"
] | 598 | 2018-06-16T02:54:28.000Z | 2022-03-31T22:31:25.000Z | misc/changelog.pl | TaylorCFrey/trafficcontrol | 308484ddfb37952901d8b138087d4b8f0c7b007d | [
"Apache-2.0",
"BSD-2-Clause",
"MIT-0",
"MIT",
"BSD-3-Clause"
] | 3,506 | 2018-06-13T16:39:39.000Z | 2022-03-29T18:31:31.000Z | misc/changelog.pl | TaylorCFrey/trafficcontrol | 308484ddfb37952901d8b138087d4b8f0c7b007d | [
"Apache-2.0",
"BSD-2-Clause",
"MIT-0",
"MIT",
"BSD-3-Clause"
] | 360 | 2018-06-13T20:08:42.000Z | 2022-03-31T10:37:47.000Z | #!/usr/bin/perl
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
use strict;
use warnings;
use WWW::Curl::Easy;
use JSON;
my $owner = shift;
my $repo = shift;
my $milestone = shift;
my $url = "https://api.github.com";
sub milestone_lookup
{
my $url = shift;
my $owner = shift;
my $repo = shift;
my $milestone_title = shift;
my $endpoint = "/repos/$owner/$repo/milestones";
my $params = "state=all";
my $resp_body;
my $curl = WWW::Curl::Easy->new;
#$curl->setopt(CURLOPT_VERBOSE, 1);
$curl->setopt(CURLOPT_HTTPHEADER, ['Accept: application/vnd.github.v3+json', 'User-Agent: Awesome-Octocat-App']);
$curl->setopt(CURLOPT_WRITEDATA, \$resp_body);
$curl->setopt(CURLOPT_URL, $url . $endpoint . '?' . $params);
my $retcode = $curl->perform();
if ($retcode == 0 && $curl->getinfo(CURLINFO_HTTP_CODE) == 200)
{
my $milestones = from_json($resp_body);
foreach my $milestone (@{ $milestones })
{
if ($milestone->{title} eq $milestone_title)
{
return $milestone->{number};
}
}
}
return undef;
}
sub issue_search
{
my $url = shift;
my $owner = shift;
my $repo = shift;
my $milestone_id = shift;
my $page = shift;
my $endpoint = "/repos/$owner/$repo/issues";
my $params = "milestone=$milestone_id&state=closed&page=$page";
my $resp_body;
my $curl = WWW::Curl::Easy->new;
#$curl->setopt(CURLOPT_VERBOSE, 1);
$curl->setopt(CURLOPT_HTTPHEADER, ['Accept: application/vnd.github.v3+json', 'User-Agent: Awesome-Octocat-App']);
$curl->setopt(CURLOPT_WRITEDATA, \$resp_body);
$curl->setopt(CURLOPT_URL, $url . $endpoint . '?' . $params);
my $retcode = $curl->perform();
if ($retcode == 0 && $curl->getinfo(CURLINFO_HTTP_CODE) == 200) {
return from_json($resp_body);
}
undef;
}
my $milestone_id = milestone_lookup($url, $owner, $repo, $milestone);
if (!defined($milestone_id))
{
exit 1;
}
my $issues;
my $changelog;
my $page = 1;
do {
$issues = issue_search($url, $owner, $repo, $milestone_id, $page);
foreach my $issue (@{ $issues })
{
if (defined($issue))
{
push @{ $changelog }, {number => $issue->{number}, title => $issue->{title}};
}
}
$page++;
} while (scalar @{ $issues });
if (defined($changelog))
{
print "Changes with Traffic Control $milestone\n";
foreach my $issue (sort {$a->{number} <=> $b->{number}} @{ $changelog })
{
print " #$issue->{number} - $issue->{title}\n";
}
}
| 24.533333 | 115 | 0.643682 |
eddebb271df5b6e232d2b42a7206f4a1284e45d2 | 678 | al | Perl | Apps/W1/Shopify/app/src/Base/Pages/ShpfyTagFactbox.Page.al | manjulchauhan/ALAppExtensions | 3f2f1d6e5337188b1af9c0275420f1c1de036a7f | [
"MIT"
] | 127 | 2018-04-17T18:03:03.000Z | 2019-05-06T18:54:17.000Z | Apps/W1/Shopify/app/src/Base/Pages/ShpfyTagFactbox.Page.al | manjulchauhan/ALAppExtensions | 3f2f1d6e5337188b1af9c0275420f1c1de036a7f | [
"MIT"
] | 2,279 | 2018-09-12T12:01:49.000Z | 2019-05-06T13:59:35.000Z | Apps/W1/Shopify/app/src/Base/Pages/ShpfyTagFactbox.Page.al | Enavate-EPS-Product/ALAppExtensions | 20136cf2ef6589ff3ce7b16776aed5e8823b76f2 | [
"MIT"
] | 41 | 2018-05-17T11:19:52.000Z | 2019-04-30T17:30:38.000Z | /// <summary>
/// Page Shpfy Tag Factbox (ID 30103).
/// </summary>
page 30103 "Shpfy Tag Factbox"
{
Caption = 'Shopify Tags';
DeleteAllowed = false;
InsertAllowed = false;
ModifyAllowed = false;
PageType = ListPart;
RefreshOnActivate = true;
SourceTable = "Shpfy Tag";
layout
{
area(content)
{
repeater(Group)
{
field(Tag; Rec.Tag)
{
ApplicationArea = All;
Caption = 'Tag';
ToolTip = 'Specifies the tags of a product that are used for filtering and search.';
}
}
}
}
} | 23.37931 | 104 | 0.485251 |
ede3fa013cd8dcaeda98f468fe1f51bb6479521a | 1,972 | pm | Perl | auto-lib/Paws/WAFRegional/ByteMatchSetUpdate.pm | agimenez/aws-sdk-perl | 9c4dff7d1af2ff0210c28ca44fb9e92bc625712b | [
"Apache-2.0"
] | null | null | null | auto-lib/Paws/WAFRegional/ByteMatchSetUpdate.pm | agimenez/aws-sdk-perl | 9c4dff7d1af2ff0210c28ca44fb9e92bc625712b | [
"Apache-2.0"
] | null | null | null | auto-lib/Paws/WAFRegional/ByteMatchSetUpdate.pm | agimenez/aws-sdk-perl | 9c4dff7d1af2ff0210c28ca44fb9e92bc625712b | [
"Apache-2.0"
] | null | null | null | package Paws::WAFRegional::ByteMatchSetUpdate;
use Moose;
has Action => (is => 'ro', isa => 'Str', required => 1);
has ByteMatchTuple => (is => 'ro', isa => 'Paws::WAFRegional::ByteMatchTuple', required => 1);
1;
### main pod documentation begin ###
=head1 NAME
Paws::WAFRegional::ByteMatchSetUpdate
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::WAFRegional::ByteMatchSetUpdate object:
$service_obj->Method(Att1 => { Action => $value, ..., ByteMatchTuple => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::WAFRegional::ByteMatchSetUpdate object:
$result = $service_obj->Method(...);
$result->Att1->Action
=head1 DESCRIPTION
In an UpdateByteMatchSet request, C<ByteMatchSetUpdate> specifies
whether to insert or delete a ByteMatchTuple and includes the settings
for the C<ByteMatchTuple>.
=head1 ATTRIBUTES
=head2 B<REQUIRED> Action => Str
Specifies whether to insert or delete a ByteMatchTuple.
=head2 B<REQUIRED> ByteMatchTuple => L<Paws::WAFRegional::ByteMatchTuple>
Information about the part of a web request that you want AWS WAF to
inspect and the value that you want AWS WAF to search for. If you
specify C<DELETE> for the value of C<Action>, the C<ByteMatchTuple>
values must exactly match the values in the C<ByteMatchTuple> that you
want to delete from the C<ByteMatchSet>.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::WAFRegional>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: https://github.com/pplu/aws-sdk-perl
Please report bugs to: https://github.com/pplu/aws-sdk-perl/issues
=cut
| 28.57971 | 108 | 0.749493 |
ed9e9e5c638af99ceed5362150aa1e0ed4573eb2 | 451 | pl | Perl | author/benchmark/bracketed2kanji_vs_decompose_parenthesized_kanji.pl | pawa-/Lingua-JA-NormalizeText | 8e6b12a45f7e08dfb40b52f3e93ef719024d1dad | [
"Artistic-1.0"
] | 1 | 2020-04-29T16:54:54.000Z | 2020-04-29T16:54:54.000Z | author/benchmark/bracketed2kanji_vs_decompose_parenthesized_kanji.pl | pawa-/Lingua-JA-NormalizeText | 8e6b12a45f7e08dfb40b52f3e93ef719024d1dad | [
"Artistic-1.0"
] | null | null | null | author/benchmark/bracketed2kanji_vs_decompose_parenthesized_kanji.pl | pawa-/Lingua-JA-NormalizeText | 8e6b12a45f7e08dfb40b52f3e93ef719024d1dad | [
"Artistic-1.0"
] | null | null | null | #!/usr/bin/env perl
use strict;
use warnings;
use utf8;
use Benchmark qw/cmpthese/;
use Lingua::JA::NormalizeText qw/decompose_parenthesized_kanji nfkc/;
use Lingua::JA::Moji qw/bracketed2kanji/;
my $text = '㈱株';
cmpthese(-1, {
'bracketed2kanji' => sub { bracketed2kanji($text) },
'decompose_parenthesized_kanji' => sub { decompose_parenthesized_kanji($text) },
'nfkc' => sub { nfkc($text) },
});
| 26.529412 | 84 | 0.634146 |
edbf9352fad5adc938a33f6d04388abf8b8ef5c6 | 546 | t | Perl | t/08-performance/05-processkeys.t | RayMPerry/rakudo | 1e2b0ec59500b845f26bd3399d12e2fb8b78dea4 | [
"Artistic-2.0"
] | null | null | null | t/08-performance/05-processkeys.t | RayMPerry/rakudo | 1e2b0ec59500b845f26bd3399d12e2fb8b78dea4 | [
"Artistic-2.0"
] | 1 | 2019-07-19T21:49:30.000Z | 2019-07-19T21:49:30.000Z | t/08-performance/05-processkeys.t | RayMPerry/rakudo | 1e2b0ec59500b845f26bd3399d12e2fb8b78dea4 | [
"Artistic-2.0"
] | null | null | null | use Test;
plan 1;
# output of "perl6 -e 'use Test; .say for PROCESS::.keys.sort.map: { qq:!c/ Q{$_},/ }'"
my %allowed = (
Q{$AWAITER},
Q{$CWD},
Q{$CORE-SETTING-REV},
Q{$ERR},
Q{$IN},
Q{$OUT},
Q{$PERL},
Q{$PID},
Q{$RAKUDO_MODULE_DEBUG},
Q{$REPO},
Q{$SCHEDULER},
Q{$SPEC},
Q{%ENV},
Q{&chdir},
).map: { $_ => 1 };
my @unknown;
@unknown.push($_) unless %allowed{$_}:exists for PROCESS::.keys;
diag "Found {+@unknown} unexpected entries: { @unknown.sort }" unless
ok @unknown == 0, "No unexpected entries in PROCESS::";
| 21 | 88 | 0.578755 |
eda1f53d031731e3647bfb3e495cd1ad3a058c4f | 1,073 | t | Perl | t/09_delete_message.t | ka2u/Net-Amazon-SQS-Lite | bcd723c73f371d693c1710a045e2afe5152a4fde | [
"Artistic-1.0"
] | 1 | 2015-07-23T05:45:24.000Z | 2015-07-23T05:45:24.000Z | t/09_delete_message.t | ka2u/Net-Amazon-SQS-Lite | bcd723c73f371d693c1710a045e2afe5152a4fde | [
"Artistic-1.0"
] | null | null | null | t/09_delete_message.t | ka2u/Net-Amazon-SQS-Lite | bcd723c73f371d693c1710a045e2afe5152a4fde | [
"Artistic-1.0"
] | null | null | null | use strict;
use Net::Amazon::SQS::Lite;
use Test::More 0.98;
use Time::Piece;
use Time::Seconds;
use URI;
my $sqs = Net::Amazon::SQS::Lite->new(
access_key => "XXXXX",
secret_key => "YYYYY",
region => "ap-northeast-1",
uri => URI->new("http://localhost:9324"),
);
SKIP: {
my $res;
eval {
$res = $sqs->list_queues;
};
skip $@, 1 if $@;
$sqs->create_queue({QueueName => "test_queue"});
$sqs->send_message({
QueueUrl => "http://localhost:9324/queue/test_queue",
MessageBody => "Hello!"
});
$res = $sqs->receive_message({
QueueUrl => "http://localhost:9324/queue/test_queue",
});
$res = $sqs->delete_message({
QueueUrl => "http://localhost:9324/queue/test_queue",
ReceiptHandle => $res->{ReceiveMessageResult}->{Message}->{ReceiptHandle},
VisibilityTimeout => 60
});
is $res->{ResponseMetadata}->{RequestId}, "00000000-0000-0000-0000-000000000000";
$res = $sqs->delete_queue({QueueUrl => "http://localhost:9324/queue/test_queue"});
};
done_testing;
| 26.825 | 86 | 0.598322 |
ed7314f1c079284e2f89d027246408fb23bf33ec | 4,877 | pl | Perl | perl/vendor/lib/auto/share/dist/DateTime-Locale/fr-VU.pl | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | [
"Apache-2.0"
] | null | null | null | perl/vendor/lib/auto/share/dist/DateTime-Locale/fr-VU.pl | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | [
"Apache-2.0"
] | 3 | 2021-01-27T10:09:28.000Z | 2021-05-11T21:20:12.000Z | perl/vendor/lib/auto/share/dist/DateTime-Locale/fr-VU.pl | Light2027/OnlineCampusSandbox | 8dcaaf62af1342470f9e7be6d42bd0f16eb910b8 | [
"Apache-2.0"
] | null | null | null | {
am_pm_abbreviated => [
"AM",
"PM",
],
available_formats => {
Bh => "h B",
Bhm => "h:mm B",
Bhms => "h:mm:ss B",
E => "E",
EBhm => "E h:mm B",
EBhms => "E h:mm:ss B",
EHm => "E HH:mm",
EHms => "E HH:mm:ss",
Ed => "E d",
Ehm => "E h:mm a",
Ehms => "E h:mm:ss a",
Gy => "y G",
GyMMM => "MMM y G",
GyMMMEd => "E d MMM y G",
GyMMMd => "d MMM y G",
H => "HH 'h'",
Hm => "HH:mm",
Hms => "HH:mm:ss",
Hmsv => "HH:mm:ss v",
Hmv => "HH:mm v",
M => "L",
MEd => "E dd/MM",
MMM => "LLL",
MMMEd => "E d MMM",
"MMMMW-count-one" => "'semaine' W (MMMM)",
"MMMMW-count-other" => "'semaine' W (MMMM)",
MMMMd => "d MMMM",
MMMd => "d MMM",
Md => "dd/MM",
d => "d",
h => "h a",
hm => "h:mm a",
hms => "h:mm:ss a",
hmsv => "h:mm:ss a v",
hmv => "h:mm a v",
ms => "mm:ss",
y => "y",
yM => "MM/y",
yMEd => "E dd/MM/y",
yMMM => "MMM y",
yMMMEd => "E d MMM y",
yMMMM => "MMMM y",
yMMMd => "d MMM y",
yMd => "dd/MM/y",
yQQQ => "QQQ y",
yQQQQ => "QQQQ y",
"yw-count-one" => "'semaine' w 'de' Y",
"yw-count-other" => "'semaine' w 'de' Y",
},
code => "fr-VU",
date_format_full => "EEEE d MMMM y",
date_format_long => "d MMMM y",
date_format_medium => "d MMM y",
date_format_short => "dd/MM/y",
datetime_format_full => "{1} '\N{U+00e0}' {0}",
datetime_format_long => "{1} '\N{U+00e0}' {0}",
datetime_format_medium => "{1} '\N{U+00e0}' {0}",
datetime_format_short => "{1} {0}",
day_format_abbreviated => [
"lun.",
"mar.",
"mer.",
"jeu.",
"ven.",
"sam.",
"dim.",
],
day_format_narrow => [
"L",
"M",
"M",
"J",
"V",
"S",
"D",
],
day_format_wide => [
"lundi",
"mardi",
"mercredi",
"jeudi",
"vendredi",
"samedi",
"dimanche",
],
day_stand_alone_abbreviated => [
"lun.",
"mar.",
"mer.",
"jeu.",
"ven.",
"sam.",
"dim.",
],
day_stand_alone_narrow => [
"L",
"M",
"M",
"J",
"V",
"S",
"D",
],
day_stand_alone_wide => [
"lundi",
"mardi",
"mercredi",
"jeudi",
"vendredi",
"samedi",
"dimanche",
],
era_abbreviated => [
"av. J.-C.",
"ap. J.-C.",
],
era_narrow => [
"av. J.-C.",
"ap. J.-C.",
],
era_wide => [
"avant J\N{U+00e9}sus-Christ",
"apr\N{U+00e8}s J\N{U+00e9}sus-Christ",
],
first_day_of_week => 1,
glibc_date_1_format => "%a %b %e %H:%M:%S %Z %Y",
glibc_date_format => "%m/%d/%y",
glibc_datetime_format => "%a %b %e %H:%M:%S %Y",
glibc_time_12_format => "%I:%M:%S %p",
glibc_time_format => "%H:%M:%S",
language => "French",
month_format_abbreviated => [
"janv.",
"f\N{U+00e9}vr.",
"mars",
"avr.",
"mai",
"juin",
"juil.",
"ao\N{U+00fb}t",
"sept.",
"oct.",
"nov.",
"d\N{U+00e9}c.",
],
month_format_narrow => [
"J",
"F",
"M",
"A",
"M",
"J",
"J",
"A",
"S",
"O",
"N",
"D",
],
month_format_wide => [
"janvier",
"f\N{U+00e9}vrier",
"mars",
"avril",
"mai",
"juin",
"juillet",
"ao\N{U+00fb}t",
"septembre",
"octobre",
"novembre",
"d\N{U+00e9}cembre",
],
month_stand_alone_abbreviated => [
"janv.",
"f\N{U+00e9}vr.",
"mars",
"avr.",
"mai",
"juin",
"juil.",
"ao\N{U+00fb}t",
"sept.",
"oct.",
"nov.",
"d\N{U+00e9}c.",
],
month_stand_alone_narrow => [
"J",
"F",
"M",
"A",
"M",
"J",
"J",
"A",
"S",
"O",
"N",
"D",
],
month_stand_alone_wide => [
"janvier",
"f\N{U+00e9}vrier",
"mars",
"avril",
"mai",
"juin",
"juillet",
"ao\N{U+00fb}t",
"septembre",
"octobre",
"novembre",
"d\N{U+00e9}cembre",
],
name => "French Vanuatu",
native_language => "fran\N{U+00e7}ais",
native_name => "fran\N{U+00e7}ais Vanuatu",
native_script => undef,
native_territory => "Vanuatu",
native_variant => undef,
quarter_format_abbreviated => [
"T1",
"T2",
"T3",
"T4",
],
quarter_format_narrow => [
1,
2,
3,
4,
],
quarter_format_wide => [
"1er trimestre",
"2e trimestre",
"3e trimestre",
"4e trimestre",
],
quarter_stand_alone_abbreviated => [
"T1",
"T2",
"T3",
"T4",
],
quarter_stand_alone_narrow => [
1,
2,
3,
4,
],
quarter_stand_alone_wide => [
"1er trimestre",
"2e trimestre",
"3e trimestre",
"4e trimestre",
],
script => undef,
territory => "Vanuatu",
time_format_full => "h:mm:ss a zzzz",
time_format_long => "h:mm:ss a z",
time_format_medium => "h:mm:ss a",
time_format_short => "h:mm a",
variant => undef,
version => 36,
}
| 17.864469 | 51 | 0.449457 |
edd9e54b4daa1f0096162a0c33d3c16a4a92b8f8 | 1,698 | t | Perl | t/regression/filehandle01.t | f20/excel-writer-xlsx | b08a865c6972f935b7d72e64e5580cca8e6cc299 | [
"Artistic-1.0-Perl"
] | 61 | 2015-02-03T02:49:53.000Z | 2022-02-13T09:17:53.000Z | t/regression/filehandle01.t | f20/excel-writer-xlsx | b08a865c6972f935b7d72e64e5580cca8e6cc299 | [
"Artistic-1.0-Perl"
] | 167 | 2015-01-02T09:25:11.000Z | 2022-02-16T22:04:20.000Z | t/regression/filehandle01.t | f20/excel-writer-xlsx | b08a865c6972f935b7d72e64e5580cca8e6cc299 | [
"Artistic-1.0-Perl"
] | 31 | 2015-02-16T12:06:45.000Z | 2021-10-14T13:03:22.000Z | ###############################################################################
#
# Tests the output of Excel::Writer::XLSX against Excel generated files.
#
# Copyright 2000-2021, John McNamara, [email protected]
#
use lib 't/lib';
use TestFunctions qw(_compare_xlsx_files _is_deep_diff);
use strict;
use warnings;
use Test::More tests => 1;
###############################################################################
#
# Tests setup.
#
my $filename = 'filehandle01.xlsx';
my $dir = 't/regression/';
my $got_filename = $dir . "ewx_$filename";
my $exp_filename = $dir . 'xlsx_files/' . $filename;
my $ignore_members = [];
my $ignore_elements = {};
###############################################################################
#
# Test the creation of a simple Excel::Writer::XLSX file as a filehandle.
#
use Excel::Writer::XLSX;
open my $fh, '>', \my $str or die "Failed to open str filehandle: $!";
my $workbook = Excel::Writer::XLSX->new( $fh );
my $worksheet = $workbook->add_worksheet();
$worksheet->write( 'A1', 'Hello' );
$worksheet->write( 'A2', 123 );
$workbook->close();
open my $out_fh, '>', $got_filename or die "Failed to open out filehandle: $!";
binmode $out_fh;
print $out_fh $str;
close $out_fh;
###############################################################################
#
# Compare the generated and existing Excel files.
#
my ( $got, $expected, $caption ) = _compare_xlsx_files(
$got_filename,
$exp_filename,
$ignore_members,
$ignore_elements,
);
_is_deep_diff( $got, $expected, $caption );
###############################################################################
#
# Cleanup.
#
unlink $got_filename;
__END__
| 22.342105 | 79 | 0.510601 |
ed93f4a66a1f7c6503544b914767cc88ffcd39be | 24,587 | t | Perl | t/Calculator.t | ecelis/zeroclickinfo-goodies | 1afbe5dc3612f38eb82a1a8a46c9d0cfa023b576 | [
"Apache-2.0"
] | null | null | null | t/Calculator.t | ecelis/zeroclickinfo-goodies | 1afbe5dc3612f38eb82a1a8a46c9d0cfa023b576 | [
"Apache-2.0"
] | null | null | null | t/Calculator.t | ecelis/zeroclickinfo-goodies | 1afbe5dc3612f38eb82a1a8a46c9d0cfa023b576 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env perl
use strict;
use warnings;
use Test::More;
use DDG::Test::Goodie;
use DDG::Goodie::Calculator; # For function subtests.
use utf8;
zci answer_type => 'calc';
zci is_cached => 1;
ddg_goodie_test(
[qw( DDG::Goodie::Calculator )],
'what is 2-2' => test_zci(
"2 - 2 = 0",
heading => 'Calculator',
structured_answer => {
input => ['2 - 2'],
operation => 'Calculate',
result => qr/>0</
}
),
'solve 2+2' => test_zci(
"2 + 2 = 4",
heading => 'Calculator',
structured_answer => {
input => ['2 + 2'],
operation => 'Calculate',
result => qr/>4</
}
),
'2^8' => test_zci(
"2 ^ 8 = 256",
heading => 'Calculator',
structured_answer => {
input => ['2 ^ 8'],
operation => 'Calculate',
result => qr/>256</
}
),
'2 *7' => test_zci(
"2 * 7 = 14",
heading => 'Calculator',
structured_answer => {
input => ['2 * 7'],
operation => 'Calculate',
result => qr/>14</
}
),
'4 ∙ 5' => test_zci(
"4 * 5 = 20",
heading => 'Calculator',
structured_answer => {
input => ['4 * 5'],
operation => 'Calculate',
result => qr/>20</
}
),
'6 ⋅ 7' => test_zci(
"6 * 7 = 42",
heading => 'Calculator',
structured_answer => {
input => ['6 * 7'],
operation => 'Calculate',
result => qr/>42</
}
),
'3 × dozen' => test_zci(
"3 * dozen = 36",
heading => 'Calculator',
structured_answer => {
input => ['3 * dozen'],
operation => 'Calculate',
result => qr/>36</
}
),
'dozen ÷ 4' => test_zci(
"dozen / 4 = 3",
heading => 'Calculator',
structured_answer => {
input => ['dozen / 4'],
operation => 'Calculate',
result => qr/>3</
}
),
'1 dozen * 2' => test_zci(
"1 dozen * 2 = 24",
heading => 'Calculator',
structured_answer => {
input => ['1 dozen * 2'],
operation => 'Calculate',
result => qr/>24</
}
),
'dozen + dozen' => test_zci(
"dozen + dozen = 24",
heading => 'Calculator',
structured_answer => {
input => ['dozen + dozen'],
operation => 'Calculate',
result => qr/>24</
}
),
'2divided by 4' => test_zci(
"2 divided by 4 = 0.5",
heading => 'Calculator',
structured_answer => {
input => ['2 divided by 4'],
operation => 'Calculate',
result => qr/>0.5</
}
),
'2^2' => test_zci(
"2 ^ 2 = 4",
heading => 'Calculator',
structured_answer => {
input => ['2 ^ 2'],
operation => 'Calculate',
result => qr/>4</
}
),
'2^0.2' => test_zci(
"2 ^ 0.2 = 1.14869835499704",
heading => 'Calculator',
structured_answer => {
input => ['2 ^ 0.2'],
operation => 'Calculate',
result => qr/>1\.14869835499704</
}
),
'cos(0)' => test_zci(
"cos(0) = 1",
heading => 'Calculator',
structured_answer => {
input => ['cos(0)'],
operation => 'Calculate',
result => qr/>1</
}
),
'tan(1)' => test_zci(
"tan(1) = 1.5574077246549",
heading => 'Calculator',
structured_answer => {
input => ['tan(1)'],
operation => 'Calculate',
result => qr/>1\.5574077246549</
}
),
'tanh(1)' => test_zci(
"tanh(1) = 0.761594155955765",
heading => 'Calculator',
structured_answer => {
input => ['tanh(1)'],
operation => 'Calculate',
result => qr/>0\.761594155955765</
}
),
'cotan(1)' => test_zci(
"cotan(1) = 0.642092615934331",
heading => 'Calculator',
structured_answer => {
input => ['cotan(1)'],
operation => 'Calculate',
result => qr/>0\.642092615934331</
}
),
'sin(1)' => test_zci(
"sin(1) = 0.841470984807897",
heading => 'Calculator',
structured_answer => {
input => ['sin(1)'],
operation => 'Calculate',
result => qr/>0\.841470984807897</
}
),
'csc(1)' => test_zci(
"csc(1) = 1.18839510577812",
heading => 'Calculator',
structured_answer => {
input => ['csc(1)'],
operation => 'Calculate',
result => qr/>1\.18839510577812</
}
),
'sec(1)' => test_zci(
"sec(1) = 1.85081571768093",
heading => 'Calculator',
structured_answer => {
input => ['sec(1)'],
operation => 'Calculate',
result => qr/>1\.85081571768093</
}
),
'log(3)' => test_zci(
"log(3) = 1.09861228866811",
heading => 'Calculator',
structured_answer => {
input => ['log(3)'],
operation => 'Calculate',
result => qr/>1\.09861228866811</
}
),
'ln(3)' => test_zci(
"log(3) = 1.09861228866811",
heading => 'Calculator',
structured_answer => {
input => ['log(3)'],
operation => 'Calculate',
result => qr/>1\.09861228866811</
}
),
'log10(100.00)' => test_zci(
"log10(100.00) = 2",
heading => 'Calculator',
structured_answer => {
input => ['log10(100.00)'],
operation => 'Calculate',
result => qr/>2</
}
),
'log_10(100.00)' => test_zci(
"log_10(100.00) = 2",
heading => 'Calculator',
structured_answer => {
input => ['log_10(100.00)'],
operation => 'Calculate',
result => qr/>2</
}
),
'log_2(16)' => test_zci(
"log_2(16) = 4",
heading => 'Calculator',
structured_answer => {
input => ['log_2(16)'],
operation => 'Calculate',
result => qr/>4</
}
),
'log_23(25)' => test_zci(
"log_23(25) = 1.0265928122321",
heading => 'Calculator',
structured_answer => {
input => ['log_23(25)'],
operation => 'Calculate',
result => qr/>1\.0265928122321</
}
),
'log23(25)' => test_zci(
"log23(25) = 1.0265928122321",
heading => 'Calculator',
structured_answer => {
input => ['log23(25)'],
operation => 'Calculate',
result => qr/>1\.0265928122321</
}
),
'$3.43+$34.45' => test_zci(
'$3.43 + $34.45 = $37.88',
heading => 'Calculator',
structured_answer => {
input => ['$3.43 + $34.45'],
operation => 'Calculate',
result => qr/>\$37\.88</
}
),
'$3.45+$34.45' => test_zci(
'$3.45 + $34.45 = $37.90',
heading => 'Calculator',
structured_answer => {
input => ['$3.45 + $34.45'],
operation => 'Calculate',
result => qr/>\$37\.90</
}
),
'$3+$34' => test_zci(
'$3 + $34 = $37.00',
heading => 'Calculator',
structured_answer => {
input => ['$3 + $34'],
operation => 'Calculate',
result => qr/>\$37\.00</
}
),
'$3,4+$34,4' => test_zci(
'$3,4 + $34,4 = $37,80',
heading => 'Calculator',
structured_answer => {
input => ['$3,4 + $34,4'],
operation => 'Calculate',
result => qr/>\$37,80</
}
),
'64*343' => test_zci(
'64 * 343 = 21,952',
heading => 'Calculator',
structured_answer => {
input => ['64 * 343'],
operation => 'Calculate',
result => qr/>21,952</
}
),
'1E2 + 1' => test_zci(
'(1 * 10 ^ 2) + 1 = 101',
heading => 'Calculator',
structured_answer => {
input => ['(1 * 10 ^ 2) + 1'],
operation => 'Calculate',
result => qr/>101</
}
),
'1 + 1E2' => test_zci(
'1 + (1 * 10 ^ 2) = 101',
heading => 'Calculator',
structured_answer => {
input => ['1 + (1 * 10 ^ 2)'],
operation => 'Calculate',
result => qr/>101</
}
),
'2 * 3 + 1E2' => test_zci(
'2 * 3 + (1 * 10 ^ 2) = 106',
heading => 'Calculator',
structured_answer => {
input => ['2 * 3 + (1 * 10 ^ 2)'],
operation => 'Calculate',
result => qr/>106</
}
),
'1E2 + 2 * 3' => test_zci(
'(1 * 10 ^ 2) + 2 * 3 = 106',
heading => 'Calculator',
structured_answer => {
input => ['(1 * 10 ^ 2) + 2 * 3'],
operation => 'Calculate',
result => qr/>106</
}
),
'1E2 / 2' => test_zci(
'(1 * 10 ^ 2) / 2 = 50',
heading => 'Calculator',
structured_answer => {
input => ['(1 * 10 ^ 2) / 2'],
operation => 'Calculate',
result => qr/>50</
}
),
'2 / 1E2' => test_zci(
'2 / (1 * 10 ^ 2) = 0.02',
heading => 'Calculator',
structured_answer => {
input => ['2 / (1 * 10 ^ 2)'],
operation => 'Calculate',
result => qr/>0\.02</
}
),
'424334+2253828' => test_zci(
'424334 + 2253828 = 2,678,162',
heading => 'Calculator',
structured_answer => {
input => ['424334 + 2253828'],
operation => 'Calculate',
result => qr/>2,678,162</
}
),
'4.243,34+22.538,28' => test_zci(
'4.243,34 + 22.538,28 = 26.781,62',
heading => 'Calculator',
structured_answer => {
input => ['4.243,34 + 22.538,28'],
operation => 'Calculate',
result => qr/>26\.781,62</
}
),
'sin(1,0) + 1,05' => test_zci(
'sin(1,0) + 1,05 = 1,8914709848079',
heading => 'Calculator',
structured_answer => {
input => ['sin(1,0) + 1,05'],
operation => 'Calculate',
result => qr/>1,8914709848079</
}
),
'21 + 15 x 0 + 5' => test_zci(
'21 + 15 * 0 + 5 = 26',
heading => 'Calculator',
structured_answer => {
input => ['21 + 15 * 0 + 5'],
operation => 'Calculate',
result => qr/>26</
}
),
'0.8158 - 0.8157' => test_zci(
'0.8158 - 0.8157 = 0.0001',
heading => 'Calculator',
structured_answer => {
input => ['0.8158 - 0.8157'],
operation => 'Calculate',
result => qr/>0\.0001</
}
),
'2,90 + 4,6' => test_zci(
'2,90 + 4,6 = 7,50',
heading => 'Calculator',
structured_answer => {
input => ['2,90 + 4,6'],
operation => 'Calculate',
result => qr/>7,50</
}
),
'2,90 + sec(4,6)' => test_zci(
'2,90 + sec(4,6) = -6,01642861135959',
heading => 'Calculator',
structured_answer => {
input => ['2,90 + sec(4,6)'],
operation => 'Calculate',
result => qr/>-6,01642861135959</
}
),
'100 - 96.54' => test_zci(
'100 - 96.54 = 3.46',
heading => 'Calculator',
structured_answer => {
input => ['100 - 96.54'],
operation => 'Calculate',
result => qr/>3\.46</
}
),
'1. + 1.' => test_zci(
'1. + 1. = 2',
heading => 'Calculator',
structured_answer => {
input => ['1. + 1.'],
operation => 'Calculate',
result => qr/>2</
}
),
'1 + sin(pi)' => test_zci(
'1 + sin(pi) = 1',
heading => 'Calculator',
structured_answer => {
input => ['1 + sin(pi)'],
operation => 'Calculate',
result => qr/>1</
}
),
'1 - 1' => test_zci(
'1 - 1 = 0',
heading => 'Calculator',
structured_answer => {
input => ['1 - 1'],
operation => 'Calculate',
result => qr/>0</
}
),
'sin(pi/2)' => test_zci(
'sin(pi / 2) = 1',
heading => 'Calculator',
structured_answer => {
input => ['sin(pi / 2)'],
operation => 'Calculate',
result => qr/>1</
}
),
'sin(pi)' => test_zci(
'sin(pi) = 0',
heading => 'Calculator',
structured_answer => {
input => ['sin(pi)'],
operation => 'Calculate',
result => qr/>0</
}
),
'cos(2pi)' => test_zci(
'cos(2 pi) = 1',
heading => 'Calculator',
structured_answer => {
input => ['cos(2 pi)'],
operation => 'Calculate',
result => qr/>1</
}
),
'5 squared' => test_zci(
'5 ^ 2 = 25',
heading => 'Calculator',
structured_answer => {
input => ['5 ^ 2'],
operation => 'Calculate',
result => qr/>25</
}
),
'sqrt(4)' => test_zci(
'sqrt(4) = 2',
heading => 'Calculator',
structured_answer => {
input => ['sqrt(4)'],
operation => 'Calculate',
result => qr/>2</
}
),
'1.0 + 5 squared' => test_zci(
'1.0 + 5 ^ 2 = 26',
heading => 'Calculator',
structured_answer => {
input => ['1.0 + 5 ^ 2'],
operation => 'Calculate',
result => qr/>26</
}
),
'3 squared + 4 squared' => test_zci(
'3 ^ 2 + 4 ^ 2 = 25',
heading => 'Calculator',
structured_answer => {
input => ['3 ^ 2 + 4 ^ 2'],
operation => 'Calculate',
result => qr/>25</
}
),
'2,2 squared' => test_zci(
'2,2 ^ 2 = 4,84',
heading => 'Calculator',
structured_answer => {
input => ['2,2 ^ 2'],
operation => 'Calculate',
result => qr/>4,84</
}
),
'0.8^2 + 0.6^2' => test_zci(
'0.8 ^ 2 + 0.6 ^ 2 = 1',
heading => 'Calculator',
structured_answer => {
input => ['0.8 ^ 2 + 0.6 ^ 2'],
operation => 'Calculate',
result => qr/>1</,
}
),
'2 squared ^ 3' => test_zci(
'2 ^ 2 ^ 3 = 256',
heading => 'Calculator',
structured_answer => {
input => ['2 ^ 2 ^ 3'],
operation => 'Calculate',
result => qr/>256</
}
),
'2 squared ^ 3.06' => test_zci(
'2 ^ 2 ^ 3.06 = 323.972172143725',
heading => 'Calculator',
structured_answer => {
input => ['2 ^ 2 ^ 3.06'],
operation => 'Calculate',
result => qr/>323\.972172143725</
}
),
'2^3 squared' => test_zci(
'2 ^ 3 ^ 2 = 512',
heading => 'Calculator',
structured_answer => {
input => ['2 ^ 3 ^ 2'],
operation => 'Calculate',
result => qr/>512</
}
),
'sqrt(2)' => test_zci(
'sqrt(2) = 1.4142135623731',
heading => 'Calculator',
structured_answer => {
input => ['sqrt(2)'],
operation => 'Calculate',
result => qr/>1\.4142135623731</
}
),
'sqrt(3 pi / 4 + 1) + 1' => test_zci(
'sqrt(3 pi / 4 + 1) + 1 = 2.83199194599549',
heading => 'Calculator',
structured_answer => {
input => ['sqrt(3 pi / 4 + 1) + 1'],
operation => 'Calculate',
result => qr/>2\.83199194599549</
}
),
'4 score + 7' => test_zci(
'4 score + 7 = 87',
heading => 'Calculator',
structured_answer => {
input => ['4 score + 7'],
operation => 'Calculate',
result => qr/>87</
}
),
'418.1 / 2' => test_zci(
'418.1 / 2 = 209.05',
heading => 'Calculator',
structured_answer => {
input => ['418.1 / 2'],
operation => 'Calculate',
result => qr/>209\.05</
}
),
'418.005 / 8' => test_zci(
'418.005 / 8 = 52.250625',
heading => 'Calculator',
structured_answer => {
input => ['418.005 / 8'],
operation => 'Calculate',
result => qr/>52\.250625</
}
),
'(pi^4+pi^5)^(1/6)' => test_zci(
'(pi ^ 4 + pi ^ 5) ^ (1 / 6) = 2.71828180861191',
heading => 'Calculator',
structured_answer => {
input => ['(pi ^ 4 + pi ^ 5) ^ (1 / 6)'],
operation => 'Calculate',
result => qr/>2\.71828180861191</
}
),
'(pi^4+pi^5)^(1/6)+1' => test_zci(
'(pi ^ 4 + pi ^ 5) ^ (1 / 6) + 1 = 3.71828180861191',
heading => 'Calculator',
structured_answer => {
input => ['(pi ^ 4 + pi ^ 5) ^ (1 / 6) + 1'],
operation => 'Calculate',
result => qr/>3\.71828180861191</
}
),
'5^4^(3-2)^1' => test_zci(
'5 ^ 4 ^ (3 - 2) ^ 1 = 625',
heading => 'Calculator',
structured_answer => {
input => ['5 ^ 4 ^ (3 - 2) ^ 1'],
operation => 'Calculate',
result => qr/>625</
}
),
'(5-4)^(3-2)^1' => test_zci(
'(5 - 4) ^ (3 - 2) ^ 1 = 1',
heading => 'Calculator',
structured_answer => {
input => ['(5 - 4) ^ (3 - 2) ^ 1'],
operation => 'Calculate',
result => qr/>1</
}
),
'(5+4-3)^(2-1)' => test_zci(
'(5 + 4 - 3) ^ (2 - 1) = 6',
heading => 'Calculator',
structured_answer => {
input => ['(5 + 4 - 3) ^ (2 - 1)'],
operation => 'Calculate',
result => qr/>6</
}
),
'5^((4-3)*(2+1))+6' => test_zci(
'5 ^ ((4 - 3) * (2 + 1)) + 6 = 131',
heading => 'Calculator',
structured_answer => {
input => ['5 ^ ((4 - 3) * (2 + 1)) + 6'],
operation => 'Calculate',
result => qr/>131</
}
),
'20x07' => test_zci(
'20 * 07 = 140',
heading => 'Calculator',
structured_answer => {
input => ['20 * 07'],
operation => 'Calculate',
result => qr/>140</
}
),
'83.166.167.160/33' => test_zci(
'83.166.167.160 / 33 = 2.520.186.883,63636',
heading => 'Calculator',
structured_answer => {
input => ['83.166.167.160 / 33'],
operation => 'Calculate',
result => qr/>2\.520\.186\.883,63636</
}
),
'123.123.123.123/255.255.255.256' => test_zci(
'123.123.123.123 / 255.255.255.256 = 0,482352941174581',
heading => 'Calculator',
structured_answer => {
input => ['123.123.123.123 / 255.255.255.256'],
operation => 'Calculate',
result => qr/>0,482352941174581</
}
),
'4E5 +1 ' => test_zci(
'(4 * 10 ^ 5) + 1 = 400,001',
heading => 'Calculator',
structured_answer => {
input => ['(4 * 10 ^ 5) + 1'],
operation => 'Calculate',
result => qr/>400,001</
}
),
'4e5 +1 ' => test_zci(
'(4 * 10 ^ 5) + 1 = 400,001',
heading => 'Calculator',
structured_answer => {
input => ['(4 * 10 ^ 5) + 1'],
operation => 'Calculate',
result => qr/>400,001</
}
),
'pi/1e9' => test_zci(
'pi / (1 * 10 ^ 9) = 3.14159265358979 * 10^-9',
heading => 'Calculator',
structured_answer => {
input => ['pi / (1 * 10 ^ 9)'],
operation => 'Calculate',
result => qr/>3\.14159265358979 \* 10<sup>-9<\/sup></
}
),
'pi*1e9' => test_zci(
'pi * (1 * 10 ^ 9) = 3,141,592,653.58979',
heading => 'Calculator',
structured_answer => {
input => ['pi * (1 * 10 ^ 9)'],
operation => 'Calculate',
result => qr/>3,141,592,653\.58979</
}
),
'1 234 + 5 432' => test_zci(
'1234 + 5432 = 6,666',
heading => 'Calculator',
structured_answer => {
input => ['1234 + 5432'],
operation => 'Calculate',
result => qr/6,666/
}
),
'1_234 + 5_432' => test_zci(
'1234 + 5432 = 6,666',
heading => 'Calculator',
structured_answer => {
input => ['1234 + 5432'],
operation => 'Calculate',
result => qr/6,666/
}
),
'(0.4e^(0))*cos(0)' => test_zci(
'(0.4e ^ (0)) * cos(0) = 0.4',
heading => 'Calculator',
structured_answer => {
input => ['(0.4e ^ (0)) * cos(0)'],
operation => 'Calculate',
result => qr'0.4'
}
),
'2pi' => test_zci(
'2 pi = 6.28318530717958',
heading => 'Calculator',
structured_answer => {
input => ['2 pi'],
operation => 'Calculate',
result => qr"6.28318530717958"
}
),
'123.123.123.123/255.255.255.255' => undef,
'83.166.167.160/27' => undef,
'9 + 0 x 07' => undef,
'0x07' => undef,
'sin(1.0) + 1,05' => undef,
'4,24,334+22,53,828' => undef,
'5234534.34.54+1' => undef,
'//' => undef,
dividedbydividedby => undef,
time => undef, # We eval perl directly, only do whitelisted stuff!
'four squared' => undef,
'! + 1' => undef, # Regression test for bad query trigger.
'$5' => undef,
'calculate 5' => undef,
'solve $50' => undef,
'382-538-2546' => undef, # Calling DuckDuckGo
'(382) 538-2546' => undef,
'382-538-2546 x1234' => undef,
'1-382-538-2546' => undef,
'+1-(382)-538-2546' => undef,
'382.538.2546' => undef,
'+38-2538111111' => undef,
'+382538-111-111' => undef,
'+38 2538 111-111' => undef,
'01780-111-111' => undef,
'01780-111-111x400' => undef,
'(01780) 111 111' => undef,
);
done_testing;
| 31.044192 | 102 | 0.377883 |
edc372623a49d057db37bf11cfcdca581d3bf37a | 4,496 | pm | Perl | lib/MusicBrainz/Server/Edit/Release/AddReleaseLabel.pm | david-russo/musicbrainz-server | a25e1a4bb28047ab1fcf6073e0c56f782671c046 | [
"BSD-2-Clause"
] | null | null | null | lib/MusicBrainz/Server/Edit/Release/AddReleaseLabel.pm | david-russo/musicbrainz-server | a25e1a4bb28047ab1fcf6073e0c56f782671c046 | [
"BSD-2-Clause"
] | null | null | null | lib/MusicBrainz/Server/Edit/Release/AddReleaseLabel.pm | david-russo/musicbrainz-server | a25e1a4bb28047ab1fcf6073e0c56f782671c046 | [
"BSD-2-Clause"
] | null | null | null | package MusicBrainz::Server::Edit::Release::AddReleaseLabel;
use Carp;
use Moose;
use MooseX::Types::Moose qw( Int Str );
use MooseX::Types::Structured qw( Dict Optional );
use MusicBrainz::Server::Constants qw( $EDIT_RELEASE_ADDRELEASELABEL );
use MusicBrainz::Server::Edit::Types qw( Nullable NullableOnPreview );
use MusicBrainz::Server::Edit::Utils qw( gid_or_id );
use MusicBrainz::Server::Translation qw( N_l );
extends 'MusicBrainz::Server::Edit';
with 'MusicBrainz::Server::Edit::Role::Preview';
with 'MusicBrainz::Server::Edit::Release::RelatedEntities';
with 'MusicBrainz::Server::Edit::Release';
with 'MusicBrainz::Server::Edit::Role::Insert';
with 'MusicBrainz::Server::Edit::Role::AlwaysAutoEdit';
sub edit_name { N_l('Add release label') }
sub edit_kind { 'add' }
sub edit_type { $EDIT_RELEASE_ADDRELEASELABEL }
sub alter_edit_pending { { Release => [ shift->release_id ] } }
sub edit_template_react { 'AddReleaseLabel' }
use aliased 'MusicBrainz::Server::Entity::Label';
use aliased 'MusicBrainz::Server::Entity::Release';
around _build_related_entities => sub {
my ($orig, $self, @args) = @_;
my %related = %{ $self->$orig(@args) };
$related{label} = [ $self->data->{label}{id} ]
if $self->data->{label};
return \%related;
};
has '+data' => (
isa => Dict[
release => NullableOnPreview[Dict[
id => Int,
gid => Optional[Str],
name => Str
]],
label => Nullable[Dict[
id => Int,
gid => Optional[Str],
name => Str
]],
catalog_number => Nullable[Str]
]
);
sub release_id { shift->data->{release}{id} }
sub initialize {
my ($self, %opts) = @_;
my $release = delete $opts{release};
die 'Missing "release" argument' unless ($release || $self->preview);
if ($release) {
$self->c->model('ReleaseLabel')->load($release) unless $release->all_labels;
$self->throw_if_release_label_is_duplicate(
$release,
$opts{label} ? $opts{label}->id : undef,
$opts{catalog_number}
);
}
$opts{release} = {
id => $release->id,
gid => $release->gid,
name => $release->name
} if $release;
$opts{label} = {
id => $opts{label}->id,
gid => $opts{label}->gid,
name => $opts{label}->name
} if $opts{label};
$self->data(\%opts);
};
sub foreign_keys {
my $self = shift;
my %fk;
my $data = $self->data;
$fk{Release} = { gid_or_id($data->{release}) => ['ArtistCredit'] } if $data->{release};
$fk{Label} = [gid_or_id($data->{label})] if $data->{label};
return \%fk;
};
sub build_display_data {
my ($self, $loaded) = @_;
my $data = $self->data;
my $display_data = {
catalog_number => $self->data->{catalog_number},
};
unless ($self->preview) {
$display_data->{release} = $loaded->{Release}->{gid_or_id($data->{release})} //
Release->new(name => $data->{release}{name});
}
if ($data->{label}) {
$display_data->{label} = $loaded->{Label}->{gid_or_id($data->{label})} //
Label->new(name => $data->{label}{name});
}
return $display_data;
}
sub insert
{
my $self = shift;
my %args = (
release_id => $self->release_id,
);
$args{catalog_number} = $self->data->{catalog_number}
if exists $self->data->{catalog_number};
$args{label_id} = $self->data->{label}{id}
if $self->data->{label};
my $rl = $self->c->model('ReleaseLabel')->insert(\%args);
$self->entity_id($rl->id);
}
sub reject
{
my $self = shift;
$self->c->model('ReleaseLabel')->delete($self->entity_id);
}
__PACKAGE__->meta->make_immutable;
no Moose;
1;
=head1 COPYRIGHT
Copyright (C) 2010 MetaBrainz Foundation
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
=cut
| 27.414634 | 91 | 0.620996 |
edc1d32227b9c1d38c67ceea673c8d5aef22e018 | 1,762 | t | Perl | auth_delay.t | muradm/nginx-tests | ddfb68b9012a0aa6d60eb7e139830d4b3c86ff2a | [
"BSD-2-Clause"
] | null | null | null | auth_delay.t | muradm/nginx-tests | ddfb68b9012a0aa6d60eb7e139830d4b3c86ff2a | [
"BSD-2-Clause"
] | null | null | null | auth_delay.t | muradm/nginx-tests | ddfb68b9012a0aa6d60eb7e139830d4b3c86ff2a | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/perl
# (C) Sergey Kandaurov
# (C) Nginx, Inc.
# Tests for auth_delay directive using auth basic module.
###############################################################################
use warnings;
use strict;
use Test::More;
use MIME::Base64;
BEGIN { use FindBin; chdir($FindBin::Bin); }
use lib 'lib';
use Test::Nginx;
###############################################################################
select STDERR; $| = 1;
select STDOUT; $| = 1;
my $t = Test::Nginx->new()->has(qw/http auth_basic/)
->write_file_expand('nginx.conf', <<'EOF');
%%TEST_GLOBALS%%
daemon off;
events {
}
http {
%%TEST_GLOBALS_HTTP%%
server {
listen 127.0.0.1:8080;
server_name localhost;
location / {
auth_delay 2s;
auth_basic "closed site";
auth_basic_user_file %%TESTDIR%%/htpasswd;
}
}
}
EOF
$t->write_file('index.html', '');
$t->write_file('htpasswd', 'user:' . '{PLAIN}good' . "\n");
$t->try_run('no auth_delay')->plan(4);
###############################################################################
my $t1 = time();
like(http_get_auth('/', 'user', 'bad'), qr/401 Unauthorize/, 'not authorized');
cmp_ok(time() - $t1, '>=', 2, 'auth delay');
$t1 = time();
like(http_get_auth('/', 'user', 'good'), qr/200 OK/, 'authorized');
cmp_ok(time() - $t1, '<', 2, 'no delay');
###############################################################################
sub http_get_auth {
my ($url, $user, $password) = @_;
my $auth = encode_base64($user . ':' . $password, '');
return http(<<EOF);
GET $url HTTP/1.0
Host: localhost
Authorization: Basic $auth
EOF
}
###############################################################################
| 20.488372 | 79 | 0.448922 |
edc1394ff062f7464d6630e69c13cbe43d275409 | 5,858 | pm | Perl | samples/client/petstore/perl/lib/WWW/SwaggerClient/Object/EnumTest.pm | phil-lopreiato/swagger-codegen | 6ad8df57f7c1d593eb0fc7d132a5cef5ce0ed100 | [
"Apache-2.0"
] | 2 | 2017-12-19T12:48:04.000Z | 2018-02-25T18:31:51.000Z | samples/client/petstore/perl/lib/WWW/SwaggerClient/Object/EnumTest.pm | phil-lopreiato/swagger-codegen | 6ad8df57f7c1d593eb0fc7d132a5cef5ce0ed100 | [
"Apache-2.0"
] | null | null | null | samples/client/petstore/perl/lib/WWW/SwaggerClient/Object/EnumTest.pm | phil-lopreiato/swagger-codegen | 6ad8df57f7c1d593eb0fc7d132a5cef5ce0ed100 | [
"Apache-2.0"
] | 3 | 2018-09-03T12:58:01.000Z | 2021-02-19T06:00:30.000Z | =begin comment
Swagger Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end comment
=cut
#
# NOTE: This class is auto generated by the swagger code generator program.
# Do not edit the class manually.
# Ref: https://github.com/swagger-api/swagger-codegen
#
package WWW::SwaggerClient::Object::EnumTest;
require 5.6.0;
use strict;
use warnings;
use utf8;
use JSON qw(decode_json);
use Data::Dumper;
use Module::Runtime qw(use_module);
use Log::Any qw($log);
use Date::Parse;
use DateTime;
use base ("Class::Accessor", "Class::Data::Inheritable");
#
#
#
# NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
# REF: https://github.com/swagger-api/swagger-codegen
#
=begin comment
Swagger Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end comment
=cut
#
# NOTE: This class is auto generated by the swagger code generator program.
# Do not edit the class manually.
# Ref: https://github.com/swagger-api/swagger-codegen
#
__PACKAGE__->mk_classdata('attribute_map' => {});
__PACKAGE__->mk_classdata('swagger_types' => {});
__PACKAGE__->mk_classdata('method_documentation' => {});
__PACKAGE__->mk_classdata('class_documentation' => {});
# new object
sub new {
my ($class, %args) = @_;
my $self = bless {}, $class;
foreach my $attribute (keys %{$class->attribute_map}) {
my $args_key = $class->attribute_map->{$attribute};
$self->$attribute( $args{ $args_key } );
}
return $self;
}
# return perl hash
sub to_hash {
return decode_json(JSON->new->convert_blessed->encode( shift ));
}
# used by JSON for serialization
sub TO_JSON {
my $self = shift;
my $_data = {};
foreach my $_key (keys %{$self->attribute_map}) {
if (defined $self->{$_key}) {
$_data->{$self->attribute_map->{$_key}} = $self->{$_key};
}
}
return $_data;
}
# from Perl hashref
sub from_hash {
my ($self, $hash) = @_;
# loop through attributes and use swagger_types to deserialize the data
while ( my ($_key, $_type) = each %{$self->swagger_types} ) {
my $_json_attribute = $self->attribute_map->{$_key};
if ($_type =~ /^array\[/i) { # array
my $_subclass = substr($_type, 6, -1);
my @_array = ();
foreach my $_element (@{$hash->{$_json_attribute}}) {
push @_array, $self->_deserialize($_subclass, $_element);
}
$self->{$_key} = \@_array;
} elsif (exists $hash->{$_json_attribute}) { #hash(model), primitive, datetime
$self->{$_key} = $self->_deserialize($_type, $hash->{$_json_attribute});
} else {
$log->debugf("Warning: %s (%s) does not exist in input hash\n", $_key, $_json_attribute);
}
}
return $self;
}
# deserialize non-array data
sub _deserialize {
my ($self, $type, $data) = @_;
$log->debugf("deserializing %s with %s",Dumper($data), $type);
if ($type eq 'DateTime') {
return DateTime->from_epoch(epoch => str2time($data));
} elsif ( grep( /^$type$/, ('int', 'double', 'string', 'boolean'))) {
return $data;
} else { # hash(model)
my $_instance = eval "WWW::SwaggerClient::Object::$type->new()";
return $_instance->from_hash($data);
}
}
__PACKAGE__->class_documentation({description => '',
class => 'EnumTest',
required => [], # TODO
} );
__PACKAGE__->method_documentation({
'enum_string' => {
datatype => 'string',
base_name => 'enum_string',
description => '',
format => '',
read_only => '',
},
'enum_integer' => {
datatype => 'int',
base_name => 'enum_integer',
description => '',
format => '',
read_only => '',
},
'enum_number' => {
datatype => 'double',
base_name => 'enum_number',
description => '',
format => '',
read_only => '',
},
});
__PACKAGE__->swagger_types( {
'enum_string' => 'string',
'enum_integer' => 'int',
'enum_number' => 'double'
} );
__PACKAGE__->attribute_map( {
'enum_string' => 'enum_string',
'enum_integer' => 'enum_integer',
'enum_number' => 'enum_number'
} );
__PACKAGE__->mk_accessors(keys %{__PACKAGE__->attribute_map});
1;
| 28.163462 | 156 | 0.645101 |
eddb31448e7196285767b83d1cf199575eaaabdc | 48,817 | pm | Perl | lib/VertRes/Parser/bam.pm | sanger-pathogens/vr-codebase | a85c4e29938cd13b36fa0a1bde1db0abd7201912 | [
"BSD-Source-Code"
] | 7 | 2015-11-20T11:38:02.000Z | 2020-11-02T18:08:18.000Z | lib/VertRes/Parser/bam.pm | sanger-pathogens/vr-codebase | a85c4e29938cd13b36fa0a1bde1db0abd7201912 | [
"BSD-Source-Code"
] | 11 | 2015-05-12T11:09:51.000Z | 2022-03-22T11:11:20.000Z | lib/VertRes/Parser/bam.pm | sanger-pathogens/vr-codebase | a85c4e29938cd13b36fa0a1bde1db0abd7201912 | [
"BSD-Source-Code"
] | 14 | 2015-06-26T09:28:41.000Z | 2021-07-23T11:25:28.000Z | =head1 NAME
VertRes::Parser::bam - parse and write bam files
=head1 SYNOPSIS
use VertRes::Parser::bam;
# create object, supplying bam file (filehandles not supported)
my $pars = VertRes::Parser::bam->new(file => 'my.bam');
# get header information
my $program = $pars->program();
my %readgroup_info = $pars->readgroup_info();
# etc.
# get the hash reference that will hold the most recently requested result
my $result_holder = $pars->result_holder();
# just count the number of alignments in the whole bam file:
my $c = 0;
while ($pars->next_result()) {
$c++;
}
# unlike other parsers, if you actually want to extract results, you need to
# specify ahead of time which fields you're interested in:
$pars->get_fields('QNAME', 'FLAG', 'RG');
while ($pars->next_result()) {
# check $result_holder for desired info, eg:
my $flag = $result_holder->{FLAG};
# get info about a flag, eg:
my $mapped = $pars->is_mapped($flag);
print "$result_holder->{QNAME} belongs to readgroup $result_holder->{RG}\n";
}
# do an efficient, fast stream through a bam, but only get results in regions of
# interest:
foreach my $region ('1:10000-20000', '3:400000-5000000') {
$pars->region($region);
while ($pars->next_result()) {
# ...
}
}
# while going through next_result, you can also write those alignments out to a
# new bam file, optionally ignoring tags to reduce output file size (and
# increase speed). Eg. write Q30 chr20 reads where both mates of a pair
# mapped to a new 'chr20.mapped.bam' file, where one of the pair is mapped to
# a 'chr20.partial.bam' file, and where both are unmapped to a
# 'chr20.unmapped.bam', ignoring the big OQ tags:
$pars->region('20');
$pars->minimum_quality(30);
$pars->get_fields('FLAG');
$pars->ignore_tags_on_write('OQ');
while ($pars->next_result) {
    my $flag = $result_holder->{FLAG};
if ($pars->is_mapped_paired($flag)) {
$pars->write_result('chr20.mapped.bam');
}
elsif ($pars->is_mapped($flag) || $pars->is_mate_mapped($flag)) {
$pars->write_result('chr20.partial.bam');
}
else {
$pars->write_result('chr20.unmapped.bam');
}
}
=head1 DESCRIPTION
A parser for bam files (not sam files).
The environment variable SAMTOOLS must point to a directory where samtools
source has been compiled, so containing at least bam.h and libbam.a.
See http://cpansearch.perl.org/src/LDS/Bio-SamTools-1.06/README for advice on
getting things to work. Specifically, you'll probably need to add -fPIC and
-m64 to the CFLAGS line in samtools's Makefile before compiling.
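For example, a minimal setup might look like the sketch below (the samtools
source path is purely illustrative):
  # sketch only: point SAMTOOLS at your compiled samtools source directory
  BEGIN { $ENV{SAMTOOLS} ||= '/path/to/samtools-source' }
  use VertRes::Parser::bam;
  my $pars = VertRes::Parser::bam->new(file => 'my.bam');
In practice SAMTOOLS is usually exported in the shell before running your
script; setting it from Perl only has an effect if it happens before this
module is compiled.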
=head1 AUTHOR
Sendu Bala: [email protected]
=cut
package VertRes::Parser::bam;
use strict;
use warnings;
use Cwd qw(abs_path);
use Inline C => Config => FILTERS => 'Strip_POD' =>
INC => "-I$ENV{SAMTOOLS}" =>
LIBS => "-L$ENV{SAMTOOLS} -lbam -lz" =>
CCFLAGS => '-D_IOLIB=2 -D_FILE_OFFSET_BITS=64'=>
enable => 'UNTAINT' =>
DIRECTORY => $ENV{PERL_INLINE_DIRECTORY} =>
no_untaint_warn => 1;
use base qw(VertRes::Parser::ParserI);
our %flags = (paired_tech => 0x0001,
paired_map => 0x0002,
self_unmapped => 0x0004,
mate_unmapped => 0x0008,
self_reverse => 0x0010,
mate_reverse => 0x0020,
'1st_in_pair' => 0x0040,
'2nd_in_pair' => 0x0080,
not_primary => 0x0100,
failed_qc => 0x0200,
duplicate => 0x0400);
=head2 new
Title : new
Usage : my $obj = VertRes::Parser::bam->new(file => 'filename');
Function: Build a new VertRes::Parser::bam object.
Returns : VertRes::Parser::bam object
Args : file => filename
=cut
sub new {
my ($class, @args) = @_;
my $self = $class->SUPER::new(@args);
# unlike normal parsers, our result holder is a hash ref
$self->{_result_holder} = {};
# we set up a hash to store write refs
$self->{_writes} = {};
# reset all the C static vars
$self->_reset();
return $self;
}
=head2 file
Title : file
Usage : $obj->file('filename.bam');
Function: Get/set filename; when setting also opens the file and sets fh().
There is also read support for remote files like
'ftp://ftp..../file.bam' and it will be downloaded to a temporary
location and opened.
NB: setting a new input bam will close any existing input bam and any
output files being written to
Returns : absolute path of file
Args : filename
=cut
sub file {
my ($self, $filename) = @_;
if ($filename) {
if (defined $self->{_filename}) {
$self->close;
}
if ($filename =~ /^ftp:|^http:/) {
# *** this needs to be upgraded to the proper samtools C api support
# for remote files, instead of copying local and losing the bai
# file!
$filename = $self->get_remote_file($filename) || $self->throw("Could not download remote file '$filename'");
}
# avoid potential problems with caller changing dir and things being
# relative; also more informative and explicit to throw with full path
$filename = abs_path($filename);
# set up the open command which is just for the header
my $open = "samtools view -H $filename |";
# go ahead and open it (3 arg form not working when middle is optional)
open(my $fh, $open) || $self->throw("Couldn't open '$open': $!");
$self->{_filename} = $filename;
$self->fh($fh);
# open in the C API
($self->{_chead}, $self->{_cbam}, $self->{_cb}) = $self->_initialize_bam($filename);
}
return $self->{_filename};
}
=head2 close
Title : close
Usage : $obj->close();
Function: Ends the read of this sam/bam.
Returns : n/a
Args : n/a
=cut
sub close {
my $self = shift;
my $fh = $self->fh();
if ($fh) {
# make sure we've finished reading the whole thing before attempting to
# close
while (<$fh>) {
next;
}
if (defined $self->{_cbam}) {
$self->_close_bam($self->{_cbam});
$self->_close_idx;
}
while (my ($key, $val) = each %{$self->{_writes} || {}}) {
$self->_close_bam($val);
}
}
return $self->SUPER::close();
}
use Inline C => <<'END_C';
=head2 is_sequencing_paired
Title : is_sequencing_paired
Usage : if ($obj->is_sequencing_paired($flag)) { ... };
Function: Ask if a given flag indicates the read was paired in sequencing.
Returns : boolean
 Args    : int (the flag received from $result_holder->{FLAG})
=cut
int is_sequencing_paired(SV* self, int flag) {
return (flag & 0x0001) > 0 ? 1 : 0;
}
=head2 is_mapped_paired
Title : is_mapped_paired
Usage : if ($obj->is_mapped_paired($flag)) { ... };
Function: Ask if a given flag indicates the read was mapped in a proper pair.
Returns : boolean
 Args    : int (the flag received from $result_holder->{FLAG})
=cut
int is_mapped_paired(SV* self, int flag) {
return (flag & 0x0002) > 0 ? 1 : 0;
}
=head2 is_mapped
Title : is_mapped
Usage : if ($obj->is_mapped($flag)) { ... };
Function: Ask if a given flag indicates the read was itself mapped.
Returns : boolean
 Args    : int (the flag received from $result_holder->{FLAG})
=cut
int is_mapped(SV* self, int flag) {
return (flag & 0x0004) == 0 ? 1 : 0;
}
=head2 is_mate_mapped
Title : is_mate_mapped
Usage : if ($obj->is_mate_mapped($flag)) { ... };
Function: Ask if a given flag indicates the read's mate was mapped.
Returns : boolean
 Args    : int (the flag received from $result_holder->{FLAG})
=cut
int is_mate_mapped(SV* self, int flag) {
return (flag & 0x0008) == 0 ? 1 : 0;
}
=head2 is_reverse_strand
Title : is_reverse_strand
Usage : if ($obj->is_reverse_strand($flag)) { ... };
 Function: Ask if a given flag indicates the read is on the reverse strand.
 Returns : boolean
 Args    : int (the flag received from $result_holder->{FLAG})
=cut
int is_reverse_strand(SV* self, int flag) {
return (flag & 0x0010) > 0 ? 1 : 0;
}
=head2 is_mate_reverse_strand
Title : is_mate_reverse_strand
Usage : if ($obj->is_mate_reverse_strand($flag)) { ... };
 Function: Ask if a given flag indicates the read's mate is on the reverse
           strand.
 Returns : boolean
 Args    : int (the flag received from $result_holder->{FLAG})
=cut
int is_mate_reverse_strand(SV* self, int flag) {
return (flag & 0x0020) > 0 ? 1 : 0;
}
=head2 is_first
Title : is_first
Usage : if ($obj->is_first($flag)) { ... };
Function: Ask if a given flag indicates the read was the first of a pair.
Returns : boolean
 Args    : int (the flag received from $result_holder->{FLAG})
=cut
int is_first(SV* self, int flag) {
return (flag & 0x0040) > 0 ? 1 : 0;
}
=head2 is_second
Title : is_second
Usage : if ($obj->is_second($flag)) { ... };
Function: Ask if a given flag indicates the read was the second of a pair.
Returns : boolean
 Args    : int (the flag received from $result_holder->{FLAG})
=cut
int is_second(SV* self, int flag) {
return (flag & 0x0080) > 0 ? 1 : 0;
}
=head2 is_primary
Title : is_primary
Usage : if ($obj->is_primary($flag)) { ... };
Function: Ask if a given flag indicates the read alignment was primary.
Returns : boolean
 Args    : int (the flag received from $result_holder->{FLAG})
=cut
int is_primary(SV* self, int flag) {
return (flag & 0x0100) == 0 ? 1 : 0;
}
=head2 passes_qc
Title : passes_qc
Usage : if ($obj->passes_qc($flag)) { ... };
Function: Ask if a given flag indicates the read passes quality checks.
Returns : boolean
 Args    : int (the flag received from $result_holder->{FLAG})
=cut
int passes_qc(SV* self, int flag) {
return (flag & 0x0200) == 0 ? 1 : 0;
}
=head2 is_duplicate
Title : is_duplicate
Usage : if ($obj->is_duplicate($flag)) { ... };
Function: Ask if a given flag indicates the read was a duplicate.
Returns : boolean
 Args    : int (the flag received from $result_holder->{FLAG})
=cut
int is_duplicate(SV* self, int flag) {
return (flag & 0x0400) > 0 ? 1 : 0;
}
END_C
=head2 sam_version
Title : sam_version
Usage : my $sam_version = $obj->sam_version();
Function: Return the file format version of this sam file, as given in the
header.
Returns : number (undef if no header)
Args : n/a
=cut
sub sam_version {
my $self = shift;
return $self->_get_single_header_tag('HD', 'VN');
}
=head2 group_order
Title : group_order
Usage : my $group_order = $obj->group_order();
Function: Return the group order of this sam file, as given in the header.
Returns : string (undef if no header or not given in header)
Args : n/a
=cut
sub group_order {
my $self = shift;
return $self->_get_single_header_tag('HD', 'GO');
}
=head2 sort_order
Title : sort_order
Usage : my $sort_order = $obj->sort_order();
Function: Return the sort order of this sam file, as given in the header.
Returns : string (undef if no header or not given in header)
Args : n/a
=cut
sub sort_order {
my $self = shift;
return $self->_get_single_header_tag('HD', 'SO');
}
=head2 program_info
Title : program_info
Usage : my %all_program_info = $obj->program_info();
Function: Get information about the programs used to create/process this bam,
as reported in the header.
Returns : undef if no PG lines in header, else:
with no args: hash (keys are program ids, values are hash refs with
keys as tags (like VN and CL))
with just a program id: hash (keys as tags, like VN and CL)
with a program and a tag: the value of that tag for that program
Args : none for all info,
program id for all the info for just that program,
program id and tag (like 'VN' or 'CL') for specific info
=cut
sub program_info {
my $self = shift;
return $self->_handle_multi_line_header_types('PG', @_);
}
=head2 program
Title : program
Usage : my $program_id = $obj->program();
Function: Return the ID of the program used to do the mapping, as given in
the header.
If there is more than 1 PG header line, tries to guess which one is
for the mapping program.
If you want the program name, use program_info, find the PG line
with the program id from this function, then pull out PN from the
hash.
Returns : string (undef if no header or not given in header)
Args : n/a
=cut
sub program {
my $self = shift;
return $self->_guess_mapping_program();
}
sub _guess_mapping_program {
my $self = shift;
my %info = $self->program_info();
my @programs = keys %info;
if (@programs == 1) {
return $programs[0];
}
else {
# This does not work for UK10K bams
# foreach my $program (@programs) {
# if ($program =~ /bwa|maq|ssha|bfast|stampy/ || $program !~ /GATK/) {
# return $program;
# }
# }
my (@known_prg,@unknown_prg);
for my $program (@programs) {
if ($program =~ /bwa|maq|ssaha|bfast|stampy/) {
push @known_prg, $program;
}
elsif ($program !~ /GATK/) {
push @unknown_prg, $program;
}
}
if (@known_prg) {
return $known_prg[0];
}
elsif (@unknown_prg) {
return $unknown_prg[0];
}
# guess randomly
if (@programs) {
return $programs[0];
}
else {
# OMG, there's no PG lines in this bam file!
return 'unknown_algorithm';
}
}
}
=head2 program_version
Title : program_version
Usage : my $program_version = $obj->program_version();
Function: Return the program version used to do the mapping, as given in the
header.
If there is more than 1 PG header line, tries to guess which one is
for the mapping program.
Returns : string (undef if no header or not given in header)
Args : n/a
=cut
sub program_version {
my $self = shift;
my $program_id = $self->_guess_mapping_program();
return $self->program_info($program_id, 'VN');
}
=head2 command_line
Title : command_line
Usage : my $command_line = $obj->command_line();
Function: Return the command line used to do the mapping, as given in the
header.
If there is more than 1 PG header line, tries to guess which one is
for the mapping program.
Returns : string (undef if no header or not given in header)
Args : n/a
=cut
sub command_line {
my $self = shift;
my $program_id = $self->_guess_mapping_program();
return $self->program_info($program_id, 'CL');
}
=head2 sequence_info
Title : sequence_info
Usage : my %all_sequences_info = $obj->sequence_info();
my %sequence_info = $obj->sequence_info('chr1');
my $seq_length = $obj->sequence_info('chr1', 'LN');
Function: Get information about the reference sequences, as reported in the
header.
Returns : undef if no SQ lines in header, else:
with no args: hash (keys are sequence ids, values are hash refs with
keys as tags (like LN and M5))
with just a sequence id: hash (keys as tags, like LN and M5)
with a sequence and a tag: the value of that tag for that sequence
Args : none for all info,
sequence id for all the info for just that sequence,
sequence id and tag (like 'LN' or 'M5') for specific info
=cut
sub sequence_info {
my $self = shift;
return $self->_handle_multi_line_header_types('SQ', @_);
}
=head2 readgroup_info
Title : readgroup_info
Usage : my %all_rg_info = $obj->readgroup_info();
my %rg_info = $obj->readgroup_info('SRR00001');
my $library = $obj->readgroup_info('SRR00001', 'LB');
Function: Get information about the read groups, as reported in the header.
Returns : undef if no RG lines in header, else:
with no args: hash (keys are readgroups, values are hash refs with
keys as tags (like LB and SM))
with just a readgroup id: hash (keys as tags, like LB and SM)
with a readgroup and a tag: the value of that tag for that readgroup
Args : none for all info,
readgroup id for all the info for just that readgroup,
readgroup id and tag (like 'LB' or 'SM') for specific info
=cut
sub readgroup_info {
my $self = shift;
return $self->_handle_multi_line_header_types('RG', @_);
}
=head2 samples
Title : samples
Usage : my @samples = $obj->samples();
Function: Get all the unique SM fields from amongst all RG lines in
the header.
Returns : list of strings (sample names)
Args : none
=cut
sub samples {
my $self = shift;
return $self->_get_unique_rg_fields('SM');
}
sub _get_unique_rg_fields {
my ($self, $field) = @_;
my %vals;
my %rg_info = $self->readgroup_info();
while (my ($rg, $data) = each %rg_info) {
$vals{$data->{$field} || next} = 1;
}
my @uniques = sort keys %vals;
return @uniques;
}
sub _handle_multi_line_header_types {
my ($self, $type, $id, $tag) = @_;
my $lines = $self->_get_header_type($type) || return;
# organise the data into by-id hash
my %all_info;
foreach my $line (@{$lines}) {
my %this_data = $self->_tags_to_hash(@{$line});
my $this_id = $this_data{SN} || $this_data{ID};
delete $this_data{SN};
delete $this_data{ID};
$all_info{$this_id} = \%this_data;
}
if (defined $id) {
my $id_info = $all_info{$id} || return;
if ($tag) {
return $id_info->{$tag};
}
else {
return %{$id_info};
}
}
else {
return %all_info;
}
}
sub _get_single_header_tag {
my ($self, $type, $tag) = @_;
my $type_data = $self->_get_header_type($type) || return;
my %data = $self->_tags_to_hash(@{$type_data});
return $data{$tag};
}
sub _tags_to_hash {
my ($self, @tags) = @_;
my %hash;
foreach my $tag (@tags) {
my ($this_tag, $value) = $tag =~ /^(\w\w):(.+)/;
$hash{$this_tag} = $value;
}
return %hash;
}
sub _get_header_type {
my ($self, $type) = @_;
my $fh = $self->fh() || return;
my $fh_id = $self->_fh_id;
$self->_get_header();
if (defined $self->{'_header'.$fh_id} && defined $self->{'_header'.$fh_id}->{$type}) {
return $self->{'_header'.$fh_id}->{$type};
}
return;
}
sub _get_header {
my $self = shift;
my $fh = $self->fh() || return;
my $fh_id = $self->_fh_id;
return if $self->{'_got_header'.$fh_id};
my $non_header;
while (<$fh>) {
if (/^@/) {
#@HD VN:1.0 GO:none SO:coordinate
#@SQ SN:1 LN:247249719 AS:NCBI36 UR:file:/nfs/sf8/G1K/ref/human_b36_female.fa M5:28f4ff5cf14f5931d0d531a901236378
#@RG ID:SRR003447 PL:ILLUMINA PU:BI.PE1.080723_SL-XBH_0003_FC3044EAAXX.7 LB:Solexa-5453 PI:500 SM:NA11918 CN:BI
#@PG ID:xxxx VN:xxx CL:xxx
my @tags = split("\t", $_);
my $type = shift @tags;
$type = substr($type, 1);
if ($type eq 'HD') {
# we only expect and handle one of these lines per file
$self->{'_header'.$fh_id}->{$type} = \@tags;
}
else {
push(@{$self->{'_header'.$fh_id}->{$type}}, \@tags);
}
}
else {
# allow header line to not be present
$non_header = $_;
last;
}
}
$self->{'_got_header'.$fh_id} = 1;
$self->{'_first_record'.$fh_id} = $non_header;
}
=head2 get_fields
Title : get_fields
Usage : $obj->get_fields('QNAME', 'FLAG', 'RG');
Function: For efficiency reasons, next_result() will not parse each result at
all by default, so your result_holder will be empty. Use this method
to choose which values you need to parse out. Your result_holder
hash will then be populated with those only.
Returns : n/a
Args : list of desired fields. Valid ones are:
QNAME
FLAG
RNAME
POS
MAPQ
CIGAR
MRNM
MPOS
ISIZE
SEQ
QUAL
           additionally, there are the pseudo-fields 'SEQ_LENGTH' to get the
raw length of the read (including hard/soft clipped bases) and
'MAPPED_SEQ_LENGTH' (only bases that match or mismatch to the
reference, ie. cigar operator M).
furthermore you can also request optional tags, such as 'RG'.
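           For example, a loop pulling out the pseudo-fields and a tag might
           look like this sketch (the choice of fields is illustrative):
           $pars->get_fields('QNAME', 'SEQ_LENGTH', 'MAPPED_SEQ_LENGTH', 'RG');
           my $rh = $pars->result_holder();
           while ($pars->next_result()) {
               # RG is reported as '*' when an alignment has no RG tag
               print "$rh->{QNAME} $rh->{SEQ_LENGTH} $rh->{MAPPED_SEQ_LENGTH} $rh->{RG}\n";
           }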
=cut
sub get_fields {
my ($self, @fields) = @_;
$self->{_fields} = [@fields];
if (@fields) {
$self->_do_fields(1);
}
else {
$self->_do_fields(0);
}
}
=head2 result_holder
Title : result_holder
Usage : my $result_holder = $obj->result_holder()
Function: Get the data structure that will hold the last result requested by
next_result()
Returns : hash ref, with keys corresponding to what you chose in get_fields().
If you never called get_fields(), the hash will be empty.
If you requested a tag and it wasn't present, the value will be set
to '*'.
Args : n/a
=cut
=head2 region
Title : region
Usage : $obj->region('3:10000-11000');
Function: Specify the chromosomal region that next_result() will get alignments
from. The bam file must have previously been indexed to create a .bai
file, and the bam must be coordinate sorted.
Subsequently setting this to '' or undef will make next_result start
behaving like normal, starting from the first alignment.
Returns : n/a
Args : A region specification string (as understood by samtools view)
=cut
sub region {
my ($self, $region) = @_;
if (defined $region) {
if (length($region) > 0) {
$self->_do_region(1);
$self->{_region} = $region;
}
else {
$self->{_region} ? $self->_reset_region() : $self->_do_region(0);
delete $self->{_region};
}
}
else {
$self->{_region} ? $self->_reset_region() : $self->_do_region(0);
delete $self->{_region};
}
}
=head2 flag_selector
Title : flag_selector
Usage : $obj->flag_selector(self_unmapped => 1, mate_unmapped => 1);
Function: Alter next_result() so that it only returns alignments with flags
           that match your settings. This method is just a convenient way of
           calculating flags; ultimately it just sets required_flag() and
           filtering_flag(); see the sketch after the argument list below.
It is recommended to use this instead of get_fields("FLAG") and
handling the flag yourself.
Returns : n/a
Args : hash, where keys are amongst the following, and values are boolean,
true to require that flag, false to prohibit it:
paired_tech
paired_map
self_unmapped
mate_unmapped
self_reverse
mate_reverse
'1st_in_pair'
'2nd_in_pair'
not_primary
failed_qc
duplicate
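           As an illustrative sketch, the following two approaches select the
           same alignments (reads where both mates are unmapped, excluding
           duplicates); the hex values are the standard SAM flag bits:
           $obj->flag_selector(self_unmapped => 1, mate_unmapped => 1, duplicate => 0);
           # is equivalent to:
           $obj->required_flag(0x0004 | 0x0008);
           $obj->filtering_flag(0x0400);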
=cut
sub flag_selector {
my ($self, %args) = @_;
my ($require, $filter) = (0, 0);
while (my ($name, $bool) = each %args) {
my $flag = $flags{$name} || $self->throw("'$name' is not a valid flag name");
if ($bool) {
$require |= $flag;
}
else {
$filter |= $flag;
}
}
$self->required_flag($require);
$self->filtering_flag($filter);
}
=head2 required_flag
Title : required_flag
Usage : $obj->required_flag(4);
Function: Require that the flag field of an alignment match the desired bitwise
flag. This alters what next_result() will return, and is faster than
using get_fields('FLAG') and working out the match yourself.
Returns : n/a
Args : int (flag)
=cut
=head2 filtering_flag
Title : filtering_flag
Usage : $obj->filtering_flag(4);
Function: Require that the flag field of an alignment not match the desired
bitwise flag. This alters what next_result() will return, and is
faster than using get_fields('FLAG') and working out the match
yourself.
Returns : n/a
Args : int (flag)
=cut
=head2 minimum_quality
Title : minimum_quality
Usage : $obj->minimum_quality(30);
Function: Require that the mapping quality field of an alignment be greater
than or equal to a desired quality. This alters what next_result()
will return, and is faster than using get_fields('MAPQ') and working
out the comparison yourself.
Returns : n/a
Args : int (minimum mapping quality)
=cut
=head2 required_library
Title : required_library
Usage : $obj->required_library('my_library_name');
Function: Alters next_result() so that it will only return alignments for reads
that came from the given library.
Returns : n/a
Args : string (library name)
=cut
=head2 required_readgroup
Title : required_readgroup
Usage : $obj->required_readgroup('my_readgroup_name');
Function: Alters next_result() so that it will only return alignments for reads
with the given RG tag. This is faster than using get_fields('RG') and
working out the match yourself.
Returns : n/a
Args : string (read group name)
=cut
=head2 next_result
Title : next_result
Usage : while ($obj->next_result()) { # look in result_holder }
Function: Access the next alignment from the bam file.
Returns : boolean (false at end of output; check the result_holder for the
actual result information)
Args : n/a
=cut
=head2 write_result
Title : write_result
Usage : $obj->write_result("out.bam");
Function: Write the most recent result retrieved with next_result() (not
just the fields you got - the whole thing) out to a new bam file
(which will inherit its header from the input bam you're parsing).
Calling ignore_tags_on_write() before this will modify what is
written.
Returns : n/a
Args : output bam file
=cut
=head2 ignore_tags_on_write
Title : ignore_tags_on_write
Usage : $obj->ignore_tags_on_write(qw(OQ XM XG XO));
Function: When using write_result(), ignore the given tags so that they will not be
output. You only need to call this once (don't put it in your
next_result loop).
Returns : n/a
Args : list of tags to ignore
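A sketch of combining this with write_result() (the tag names and output
file name are purely illustrative):

  $obj->ignore_tags_on_write(qw(OQ XM));
  while ($obj->next_result()) {
      $obj->write_result('stripped.bam');
  }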
=cut
sub ignore_tags_on_write {
my ($self, @tags) = @_;
$self->{_ignore_tags} = [@tags];
if (@tags) {
$self->_do_strip(1);
}
else {
$self->_do_strip(0);
}
}
use Inline C => <<'END_C';
#include "bam.h"
// vars and method for skipping things
static int g_do_region = 0, g_do_iter = 0, g_do_fields = 0, g_do_strip = 0;
static int g_call_skip = 0, g_min_mapQ = 0, g_flag_on = 0, g_flag_off = 0, g_try_library = 0, g_try_rg = 0;
static inline int __g_skip_aln(SV* self, bam1_t *b, bam_header_t *header) {
// ripped from sam_view.c
if (b->core.qual < g_min_mapQ || ((b->core.flag & g_flag_on) != g_flag_on) || (b->core.flag & g_flag_off)) {
return 1;
}
if (g_try_rg || g_try_library) {
HV* self_hash;
self_hash = (HV*)SvRV(self);
if (g_try_rg) {
char *g_rg;
g_rg = SvPV_nolen(*(hv_fetch(self_hash, "_req_rg", 7, 0)));
if (g_rg) {
uint8_t *s = bam_aux_get(b, "RG");
if (s) {
return (strcmp(g_rg, (char*)(s + 1)) == 0)? 0 : 1;
}
Safefree(g_rg);
}
}
if (g_try_library) {
char *g_library;
g_library = SvPV_nolen(*(hv_fetch(self_hash, "_req_lib", 8, 0)));
if (g_library) {
const char *p = bam_get_library((bam_header_t*)header, b);
return (p && strcmp(p, g_library) == 0)? 0 : 1;
Safefree(g_library);
}
}
}
return 0;
}
// new() needs to reset all the static vars
void _reset(SV* self) {
g_do_region = 0;
g_do_iter = 0;
g_do_fields = 0;
g_do_strip = 0;
g_call_skip = 0;
g_min_mapQ = 0;
g_flag_on = 0;
g_flag_off = 0;
g_try_library = 0;
g_try_rg = 0;
}
// methods to set what we'd like to require/filter
void required_flag(SV* self, char* input) {
int flag;
flag = strtol(input, NULL, 0);
g_flag_on = flag;
g_call_skip = flag > 0 ? 1 : g_call_skip;
}
void filtering_flag(SV* self, char* input) {
int flag;
flag = strtol(input, NULL, 0);
g_flag_off = flag;
g_call_skip = flag > 0 ? 1 : g_call_skip;
}
void minimum_quality(SV* self, char* input) {
int qual;
qual = strtol(input, NULL, 0);
g_min_mapQ = qual;
g_call_skip = g_min_mapQ > 0 ? 1 : g_call_skip;
}
void required_library(SV* self, char* input) {
HV* self_hash;
self_hash = (HV*)SvRV(self);
int len;
len = strlen(input);
if (len > 0) {
hv_store(self_hash, "_req_lib", 8, newSVpv(input, len), 0);
g_call_skip = 1;
g_try_library = 1;
}
else {
g_try_library = 0;
}
}
void required_readgroup(SV* self, char* input) {
HV* self_hash;
self_hash = (HV*)SvRV(self);
int len;
len = strlen(input);
if (len > 0) {
hv_store(self_hash, "_req_rg", 7, newSVpv(input, len), 0);
g_call_skip = 1;
g_try_rg = 1;
}
else {
g_try_rg = 0;
}
}
// methods to open bams
void _initialize_bam(SV* self, char* bamfile) {
bamFile *bam;
bam = bam_open(bamfile, "r");
bgzf_seek(bam, 0, 0);
bam1_t *b;
b = bam_init1();
bam_header_t *header;
header = bam_header_read(bam);
Inline_Stack_Vars;
Inline_Stack_Reset;
Inline_Stack_Push(newRV_noinc(newSViv(header)));
Inline_Stack_Push(newRV_noinc(newSViv(bam)));
Inline_Stack_Push(newRV_noinc(newSViv(b)));
Inline_Stack_Done;
}
bamFile _initialize_obam(SV* self, char* bamfile) {
bamFile *obam;
obam = bam_open(bamfile, "w");
HV* self_hash;
self_hash = (HV*)SvRV(self);
SV* header_ref;
bam_header_t *header;
header_ref = *(hv_fetch(self_hash, "_chead", 6, 0));
header = (bam_header_t*)SvIV(SvRV(header_ref));
bam_header_write(obam, header);
return obam;
}
// called by ->close and during destruction, will apply to both input and
// outputs
void _close_bam(SV* self, SV* bam_ref) {
bamFile *bam;
bam = (bamFile*)SvIV(SvRV(bam_ref));
bam_close(bam);
}
void _close_idx(SV* self) {
HV* self_hash;
self_hash = (HV*)SvRV(self);
if (hv_exists(self_hash, "_cidx", 5)) {
bam_index_t *idx;
idx = (bam_index_t*)SvIV(SvRV(*(hv_fetch(self_hash, "_cidx", 5, 0))));
bam_index_destroy(idx);
hv_delete(self_hash, "_cidx", 5, G_DISCARD);
}
}
// the main parsing method
int next_result(SV* self) {
HV* self_hash;
self_hash = (HV*)SvRV(self);
if (! hv_exists(self_hash, "_cbam", 5)) {
return 0;
}
SV* bam_ref;
bam_ref = *(hv_fetch(self_hash, "_cbam", 5, 0));
bamFile *bam;
bam = (bamFile*)SvIV(SvRV(bam_ref));
SV* b_ref;
b_ref = *(hv_fetch(self_hash, "_cb", 3, 0));
bam1_t *b;
b = (bam1_t*)SvIV(SvRV(b_ref));
SV* header_ref;
bam_header_t *header;
header_ref = *(hv_fetch(self_hash, "_chead", 6, 0));
header = (bam_header_t*)SvIV(SvRV(header_ref));
// loop to see if we can find a record that passes filters before eof
int ret;
bam_iter_t iter;
do {
if (g_do_region) {
char* region;
region = SvPV_nolen(*(hv_fetch(self_hash, "_region", 7, 0)));
bam_index_t *idx = 0;
if (hv_exists(self_hash, "_cidx", 5)) {
idx = (bam_index_t*)SvIV(SvRV(*(hv_fetch(self_hash, "_cidx", 5, 0))));
}
else {
char* filename;
filename = SvPV_nolen(*(hv_fetch(self_hash, "_filename", 9, 0)));
idx = bam_index_load(filename);
if (idx == 0) {
fprintf(stderr, "region() can only be used when there is a .bai file (none seen for %s).\n", filename);
ret = -1;
}
else {
hv_store(self_hash, "_cidx", 5, newRV_noinc(newSViv(idx)), 0);
}
}
if (idx != 0) {
int tid, beg, end;
bam_parse_region(header, region, &tid, &beg, &end);
if (tid < 0) {
fprintf(stderr, "region \"%s\" specifies an unknown reference name. Can't continue.\n", region);
ret = -1;
}
else {
iter = bam_iter_query(idx, tid, beg, end);
ret = bam_iter_read(bam, iter, b);
hv_store(self_hash, "_iter", 5, newRV_noinc(newSViv(iter)), 0);
g_do_region = 0;
g_do_iter = 1;
}
}
}
else if (g_do_iter) {
iter = (bam_iter_t*)SvIV(SvRV(*(hv_fetch(self_hash, "_iter", 5, 0))));
ret = bam_iter_read(bam, iter, b);
}
else {
ret = bam_read1(bam, b);
}
} while ( ret >= 0 && g_call_skip ? __g_skip_aln(self, b, header) : 0 );
// parse out fields if requested, return if we got a record or not
if (ret >= 0) {
SV* rh_ref;
rh_ref = *(hv_fetch(self_hash, "_result_holder", 14, 0));
HV* rh_hash;
rh_hash = (HV*)SvRV(rh_ref);
hv_clear(rh_hash);
if (g_do_fields) {
SV* fields_ref;
fields_ref = *(hv_fetch(self_hash, "_fields", 7, 0));
AV* fields_array;
fields_array = (AV*)SvRV(fields_ref);
I32 fields_maxi;
fields_maxi = av_len(fields_array);
if (fields_maxi >= 0) {
uint8_t *tag_value;
int type;
int32_t tid;
uint32_t *cigar;
int cigar_loop;
AV *cigar_avref;
char *cigar_str;
char *cigar_digits;
int cigar_digits_length;
int cigar_digits_i;
int cigar_chars_total;
char cigar_op;
int cigar_op_length;
int raw_seq_length;
int mapped_seq_length;
char *seq;
int seq_i;
uint8_t *qual;
int qual_i;
char *qual_str;
int i;
char *field;
STRLEN field_length;
for (i = 0; i <= fields_maxi; i++) {
field = SvPV(*(av_fetch(fields_array, i, 0)), field_length);
if (field_length > 2) {
if (strEQ(field, "QNAME")) {
hv_store(rh_hash, field, field_length, newSVpv(bam1_qname(b), 0), 0);
}
else if (strEQ(field, "FLAG")) {
hv_store(rh_hash, field, field_length, newSVuv(b->core.flag), 0);
}
else if (strEQ(field, "RNAME")) {
if (b->core.tid < 0) {
hv_store(rh_hash, field, field_length, newSVpv("*", 1), 0);
}
else {
hv_store(rh_hash, field, field_length, newSVpv(header->target_name[b->core.tid], 0), 0);
}
}
else if (strEQ(field, "POS")) {
hv_store(rh_hash, field, field_length, newSVuv(b->core.pos + 1), 0);
}
else if (strEQ(field, "MAPQ")) {
hv_store(rh_hash, field, field_length, newSVuv(b->core.qual), 0);
}
else if (strEQ(field, "CIGAR")) {
if (b->core.n_cigar == 0) {
hv_store(rh_hash, field, field_length, newSVpv("*", 1), 0);
}
else {
cigar = bam1_cigar(b);
cigar_str = Newxz(cigar_str, b->core.n_cigar * 5, char);
cigar_chars_total = 0;
cigar_digits = Newxz(cigar_digits, 3, char);
for (cigar_loop = 0; cigar_loop < b->core.n_cigar; ++cigar_loop) {
Renew(cigar_digits, 3, char);
cigar_digits_length = sprintf(cigar_digits, "%i", cigar[cigar_loop]>>BAM_CIGAR_SHIFT);
for (cigar_digits_i = 0; cigar_digits_i < cigar_digits_length; ++cigar_digits_i) {
cigar_str[cigar_chars_total] = cigar_digits[cigar_digits_i];
cigar_chars_total++;
}
cigar_str[cigar_chars_total] = "MIDNSHP"[cigar[cigar_loop]&BAM_CIGAR_MASK];
cigar_chars_total++;
}
hv_store(rh_hash, field, field_length, newSVpv(cigar_str, cigar_chars_total), 0);
Safefree(cigar_str);
Safefree(cigar_digits);
}
}
else if (strEQ(field, "MRNM")) {
if (b->core.mtid < 0) {
hv_store(rh_hash, field, field_length, newSVpv("*", 1), 0);
}
else {
hv_store(rh_hash, field, field_length, newSVpv(header->target_name[b->core.mtid], 0), 0);
}
}
else if (strEQ(field, "MPOS")) {
hv_store(rh_hash, field, field_length, newSVuv(b->core.mpos + 1), 0);
}
else if (strEQ(field, "ISIZE")) {
hv_store(rh_hash, field, field_length, newSViv((int*)b->core.isize), 0);
}
else if (strEQ(field, "SEQ_LENGTH") || strEQ(field, "MAPPED_SEQ_LENGTH")) {
if (b->core.n_cigar == 0) {
if (b->core.l_qseq) {
hv_store(rh_hash, field, field_length, newSVuv(b->core.l_qseq), 0);
}
else {
hv_store(rh_hash, field, field_length, newSVuv(0), 0);
}
}
else {
cigar = bam1_cigar(b);
raw_seq_length = 0;
mapped_seq_length = 0;
for (cigar_loop = 0; cigar_loop < b->core.n_cigar; ++cigar_loop) {
cigar_op_length = cigar[cigar_loop]>>BAM_CIGAR_SHIFT;
cigar_op = "MIDNSHP"[cigar[cigar_loop]&BAM_CIGAR_MASK];
if (cigar_op == 'S' || cigar_op == 'H' || cigar_op == 'I') {
raw_seq_length = raw_seq_length + cigar_op_length;
}
else if (cigar_op == 'M') {
raw_seq_length = raw_seq_length + cigar_op_length;
mapped_seq_length = mapped_seq_length + cigar_op_length;
}
}
if (strEQ(field, "SEQ_LENGTH")) {
hv_store(rh_hash, field, field_length, newSVuv(raw_seq_length), 0);
}
else {
hv_store(rh_hash, field, field_length, newSVuv(mapped_seq_length), 0);
}
}
}
else if (strEQ(field, "SEQ")) {
if (b->core.l_qseq) {
seq = Newxz(seq, b->core.l_qseq + 1, char);
for (seq_i = 0; seq_i < b->core.l_qseq; ++seq_i) {
seq[seq_i] = bam_nt16_rev_table[bam1_seqi(bam1_seq(b), seq_i)];
}
hv_store(rh_hash, field, field_length, newSVpv(seq, b->core.l_qseq), 0);
Safefree(seq);
}
else {
hv_store(rh_hash, field, field_length, newSVpv("*", 1), 0);
}
}
else if (strEQ(field, "QUAL")) {
if (b->core.l_qseq) {
qual = bam1_qual(b);
if (qual[0] != 0xff) {
qual_str = Newxz(qual_str, b->core.l_qseq + 1, char);
for (qual_i = 0; qual_i < b->core.l_qseq; ++qual_i) {
qual_str[qual_i] = qual[qual_i] + 33;
}
hv_store(rh_hash, field, field_length, newSVpv(qual_str, b->core.l_qseq), 0);
Safefree(qual_str);
}
else {
hv_store(rh_hash, field, field_length, newSVpv("*", 1), 0);
}
}
else {
hv_store(rh_hash, field, field_length, newSVpv("*", 1), 0);
}
}
}
else {
tag_value = bam_aux_get(b, field);
if (tag_value != 0) {
type = *tag_value++;
switch (type) {
case 'c':
hv_store(rh_hash, field, field_length, newSViv((int32_t)*(int8_t*)tag_value), 0);
break;
case 'C':
hv_store(rh_hash, field, field_length, newSViv((int32_t)*(uint8_t*)tag_value), 0);
break;
case 's':
hv_store(rh_hash, field, field_length, newSViv((int32_t)*(int16_t*)tag_value), 0);
break;
case 'S':
hv_store(rh_hash, field, field_length, newSViv((int32_t)*(uint16_t*)tag_value), 0);
break;
case 'i':
hv_store(rh_hash, field, field_length, newSViv(*(int32_t*)tag_value), 0);
break;
case 'I':
hv_store(rh_hash, field, field_length, newSViv((int32_t)*(uint32_t*)tag_value), 0);
break;
case 'f':
hv_store(rh_hash, field, field_length, newSVnv(*(float*)tag_value), 0);
break;
case 'A':
hv_store(rh_hash, field, field_length, newSVpv((char*)tag_value, 1), 0);
break;
case 'Z':
case 'H':
hv_store(rh_hash, field, field_length, newSVpv((char*)tag_value, 0), 0);
break;
}
}
else {
hv_store(rh_hash, field, field_length, newSVpv("*", 1), 0);
}
}
}
}
}
return 1;
}
else {
return 0;
}
}
// create an output bam that is just the header of the input bam
void _create_no_record_output_bam(SV* self, char* bamfile) {
bamFile *obam;
obam = _initialize_obam(self, bamfile);
bam_close(obam);
}
// write out the most recently read input bam record
void write_result(SV* self, char* bamfile) {
HV* self_hash;
self_hash = (HV*)SvRV(self);
if (! hv_exists(self_hash, "_cb", 3)) {
return;
}
SV* b_ref;
b_ref = *(hv_fetch(self_hash, "_cb", 3, 0));
bam1_t *b;
b = (bam1_t*)SvIV(SvRV(b_ref));
int len;
len = strlen(bamfile);
if (len < 1) {
return;
}
SV* writes_ref;
writes_ref = *(hv_fetch(self_hash, "_writes", 7, 0));
HV* writes_hash;
writes_hash = (HV*)SvRV(writes_ref);
bamFile *obam;
if (! hv_exists(writes_hash, bamfile, len)) {
obam = _initialize_obam(self, bamfile);
hv_store(writes_hash, bamfile, len, newRV_noinc(newSViv(obam)), 0);
}
else {
SV* obam_ref;
obam_ref = *(hv_fetch(writes_hash, bamfile, len, 0));
obam = (bamFile*)SvIV(SvRV(obam_ref));
}
if (g_do_strip) {
SV* ignore_ref;
ignore_ref = *(hv_fetch(self_hash, "_ignore_tags", 12, 0));
AV* ignore_array;
ignore_array = (AV*)SvRV(ignore_ref);
I32 ignore_maxi;
ignore_maxi = av_len(ignore_array);
if (ignore_maxi >= 0) {
char *tag;
uint8_t *tag_value;
int i;
for (i = 0; i <= ignore_maxi; i++) {
tag = SvPV_nolen(*(av_fetch(ignore_array, i, 0)));
tag_value = bam_aux_get(b, tag);
if (tag_value) {
bam_aux_del(b, tag_value);
}
}
}
}
bam_write1(obam, b);
}
// methods to minimise hash lookups
void _do_fields(SV* self, char* input) {
int boolean;
boolean = strtol(input, NULL, 0);
g_do_fields = boolean;
}
void _do_strip(SV* self, char* input) {
int boolean;
boolean = strtol(input, NULL, 0);
g_do_strip = boolean;
}
void _do_region(SV* self, char* input) {
int boolean;
boolean = strtol(input, NULL, 0);
g_do_region = boolean;
g_do_iter = 0;
}
void _reset_region(SV* self) {
// for some unknown reason just doing bam_read1 from the current
// position behaves inconsistently and not as expected, so we seek
// to the start when region is unset by the user
HV* self_hash;
self_hash = (HV*)SvRV(self);
if (hv_exists(self_hash, "_cbam", 5)) {
SV* bam_ref;
bam_ref = *(hv_fetch(self_hash, "_cbam", 5, 0));
bamFile *bam;
bam = (bamFile*)SvIV(SvRV(bam_ref));
bam_seek(bam,0,0);
bam_header_read(bam);
}
g_do_region = 0;
g_do_iter = 0;
}
END_C
1;
| 31.474533 | 146 | 0.530266 |
ed8b8230f12fe2330d052317e9c1b8282d160093 | 766 | pm | Perl | lib/Weather/YR/Model.pm | gitpan/Weather-YR | 76c2a6fa2b4ab6d9fff0e4376c8ab4b1db6c346d | [
"Artistic-1.0"
] | null | null | null | lib/Weather/YR/Model.pm | gitpan/Weather-YR | 76c2a6fa2b4ab6d9fff0e4376c8ab4b1db6c346d | [
"Artistic-1.0"
] | null | null | null | lib/Weather/YR/Model.pm | gitpan/Weather-YR | 76c2a6fa2b4ab6d9fff0e4376c8ab4b1db6c346d | [
"Artistic-1.0"
] | null | null | null | package Weather::YR::Model;
use Moose;
use namespace::autoclean;
=head1 NAME
Weather::YR::Model - Base class for model classes.
=head1 DESCRIPTION
Don't use this class directly. It's used as a "helper class" for other
classes.
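For illustration only, a concrete model class would typically just extend
this base class and add its own attributes (the class and attribute names
below are hypothetical):

  package Weather::YR::Model::Temperature;
  use Moose;
  use namespace::autoclean;
  extends 'Weather::YR::Model';
  has 'celsius' => (
      isa      => 'Num',
      is       => 'ro',
      required => 1,
  );
  __PACKAGE__->meta->make_immutable;
  1;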
=head1 METHODS
=head2 from
Returns this model's "from" date as a DateTime object.
=cut
has 'from' => (
isa => 'DateTime',
is => 'ro',
required => 1,
);
=head2 to
Returns this model's "to" date as a DateTime object.
=cut
has 'to' => (
isa => 'DateTime',
is => 'ro',
required => 1,
);
=head2 lang
Returns this model's language setting.
=cut
has 'lang' => (
isa => 'Str',
is => 'ro',
required => 1,
);
#
# The End
#
__PACKAGE__->meta->make_immutable;
1;
| 13.206897 | 70 | 0.593995 |
edbdcde7602670c04ce80b17c81d5ef008b2d80f | 1,471 | t | Perl | t/message_size.t | ens-ds23/ensembl-rest | eded86aa5c1769d3442b932c630693dd548cacad | [
"Apache-2.0"
] | null | null | null | t/message_size.t | ens-ds23/ensembl-rest | eded86aa5c1769d3442b932c630693dd548cacad | [
"Apache-2.0"
] | null | null | null | t/message_size.t | ens-ds23/ensembl-rest | eded86aa5c1769d3442b932c630693dd548cacad | [
"Apache-2.0"
] | null | null | null | # Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
# Copyright [2016-2020] EMBL-European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
use strict;
use warnings;
BEGIN {
use FindBin qw/$Bin/;
use lib "$Bin/lib";
use RestHelper;
$ENV{CATALYST_CONFIG} = "$Bin/../ensembl_rest_testing.conf";
$ENV{ENS_REST_LOG4PERL} = "$Bin/../log4perl_testing.conf";
}
use Test::More;
use Catalyst::Test ();
use Bio::EnsEMBL::Test::MultiTestDB;
use Bio::EnsEMBL::Test::TestUtils;
my $dba = Bio::EnsEMBL::Test::MultiTestDB->new();
my $multi = Bio::EnsEMBL::Test::MultiTestDB->new('multi');
Catalyst::Test->import('EnsEMBL::REST');
my @ids = map {qq/$_/} ( 1..100 );
my $big_message = '{ "ids" : ['.join(',',@ids).'] }';
debug($big_message);
action_bad_post('/vep/homo_sapiens/id',$big_message,qr/POST message too large/,'Throw massive message to test over-large submissions');
done_testing();
| 34.209302 | 135 | 0.723317 |
73f5168bd47a426234fd47a57d562cc7c5a2f497 | 17,745 | pm | Perl | tools/intogen/external/lib/perl/Bio/EnsEMBL/MappedSliceContainer.pm | globusgenomics/galaxy | 7caf74d9700057587b3e3434c64e82c5b16540f1 | [
"CC-BY-3.0"
] | 1 | 2021-02-05T13:19:58.000Z | 2021-02-05T13:19:58.000Z | tools/intogen/external/lib/perl/Bio/EnsEMBL/MappedSliceContainer.pm | globusgenomics/genomics-galaxy | 7caf74d9700057587b3e3434c64e82c5b16540f1 | [
"CC-BY-3.0"
] | null | null | null | tools/intogen/external/lib/perl/Bio/EnsEMBL/MappedSliceContainer.pm | globusgenomics/genomics-galaxy | 7caf74d9700057587b3e3434c64e82c5b16540f1 | [
"CC-BY-3.0"
] | null | null | null | =head1 LICENSE
Copyright (c) 1999-2012 The European Bioinformatics Institute and
Genome Research Limited. All rights reserved.
This software is distributed under a modified Apache license.
For license details, please see
http://www.ensembl.org/info/about/code_licence.html
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <[email protected]>.
Questions may also be sent to the Ensembl help desk at
<[email protected]>.
=cut
=head1 NAME
Bio::EnsEMBL::MappedSliceContainer - container for mapped slices
=head1 SYNOPSIS
# get a reference slice
my $slice =
$slice_adaptor->fetch_by_region( 'chromosome', 14, 900000, 950000 );
# create MappedSliceContainer based on the reference slice
my $msc = Bio::EnsEMBL::MappedSliceContainer->new( -SLICE => $slice );
# set the adaptor for fetching AssemblySlices
my $asa = $slice->adaptor->db->get_AssemblySliceAdaptor;
$msc->set_AssemblySliceAdaptor($asa);
# add an AssemblySlice to your MappedSliceContainer
$msc->attach_AssemblySlice('NCBIM36');
foreach my $mapped_slice ( @{ $msc->get_all_MappedSlices } ) {
print $mapped_slice->name, "\n";
foreach my $sf ( @{ $mapped_slice->get_all_SimpleFeatures } ) {
print " ", &to_string($sf), "\n";
}
}
=head1 DESCRIPTION
NOTE: this code is under development and is neither fully functional nor
tested yet. Use it only for development.
A MappedSliceContainer holds a collection of one or more
Bio::EnsEMBL::MappedSlices. It is based on a real reference slice and
contains an artificial "container slice" which defines the common
coordinate system used by all attached MappedSlices. There is also a
mapper to convert coordinates between the reference and the container
slice.
Attaching MappedSlices to the container is delegated to adaptors
(which act more as object factories than as traditional Ensembl db
adaptors). The adaptors will also modify the container slice and
associated mapper if required. This design allows us to keep the
MappedSliceContainer generic and encapsulate the data source specific
code in the adaptor/factory module.
In the simplest use case, all required MappedSlices are attached to the
MappedSliceContainer at once (by a single call to the adaptor). This
object should also allow "hot-plugging" of MappedSlices (e.g. attach a
MappedSlice representing a strain to a container that already contains a
multi-species alignment). The methods for attaching new MappedSlices will
be responsible for performing the necessary adjustments to the coordinates
and mapper on the existing MappedSlices.
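As a sketch of that "hot-plugging" case (purely illustrative, and assuming
$strain_slice_adaptor is a StrainSliceAdaptor obtained from a variation
database):

  # the container may already hold other MappedSlices; now add a strain
  $msc->set_adaptor('strain', $strain_slice_adaptor);
  $msc->attach_StrainSlice('Watson');
  foreach my $mapped_slice ( @{ $msc->get_all_MappedSlices } ) {
    print $mapped_slice->name, "\n";
  }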
=head1 METHODS
new
set_adaptor
get_adaptor
set_AssemblySliceAdaptor
get_AssemblySliceAdaptor
set_AlignSliceAdaptor (not implemented yet)
get_AlignSliceAdaptor (not implemented yet)
set_StrainSliceAdaptor (not implemented yet)
get_StrainSliceAdaptor (not implemented yet)
attach_AssemblySlice
attach_AlignSlice (not implemented yet)
attach_StrainSlice (not implemented yet)
get_all_MappedSlices
sub_MappedSliceContainer (not implemented yet)
ref_slice
container_slice
mapper
expanded
=head1 RELATED MODULES
Bio::EnsEMBL::MappedSlice
Bio::EnsEMBL::DBSQL::AssemblySliceAdaptor
Bio::EnsEMBL::Compara::AlignSlice
Bio::EnsEMBL::Compara::AlignSlice::Slice
Bio::EnsEMBL::AlignStrainSlice
Bio::EnsEMBL::StrainSlice
=cut
package Bio::EnsEMBL::MappedSliceContainer;
use strict;
use warnings;
no warnings 'uninitialized';
use Bio::EnsEMBL::Utils::Argument qw(rearrange);
use Bio::EnsEMBL::Utils::Exception qw(throw warning);
use Bio::EnsEMBL::CoordSystem;
use Bio::EnsEMBL::Slice;
use Bio::EnsEMBL::Mapper;
# define available adaptors to use with this container
my %adaptors = map { $_ => 1 } qw(assembly align strain);
=head2 new
Arg [SLICE] : Bio::EnsEMBL::Slice $slice - the reference slice for this
container
Arg [EXPANDED] : (optional) Boolean $expanded - set expanded mode (default:
collapsed)
Example : my $slice = $slice_adaptor->fetch_by_region('chromosome', 1,
9000000, 9500000);
my $msc = Bio::EnsEMBL::MappedSliceContainer->new(
-SLICE => $slice,
-EXPANDED => 1,
);
Description : Constructor. See the general documentation of this module for
details about this object. Note that the constructor creates an
empty container, so you'll have to attach MappedSlices to it for it to
be useful (this is usually done by an adaptor/factory).
Return type : Bio::EnsEMBL::MappedSliceContainer
Exceptions : thrown on wrong or missing argument
Caller : general
Status : At Risk
: under development
=cut
sub new {
my $caller = shift;
my $class = ref($caller) || $caller;
my ($ref_slice, $expanded) = rearrange([qw(SLICE EXPANDED)], @_);
# argument check
unless ($ref_slice and ref($ref_slice) and
($ref_slice->isa('Bio::EnsEMBL::Slice') or $ref_slice->isa('Bio::EnsEMBL::LRGSlice')) ) {
throw("You must provide a reference slice.");
}
my $self = {};
bless ($self, $class);
# initialise object
$self->{'ref_slice'} = $ref_slice;
$self->{'expanded'} = $expanded || 0;
$self->{'mapped_slices'} = [];
# create the container slice
$self->_create_container_slice($ref_slice);
return $self;
}
#
# Create an artificial slice which represents the common coordinate system used
# for this MappedSliceContainer
#
sub _create_container_slice {
my $self = shift;
my $ref_slice = shift;
# argument check
unless ($ref_slice and ref($ref_slice) and
($ref_slice->isa('Bio::EnsEMBL::Slice') or $ref_slice->isa('Bio::EnsEMBL::LRGSlice')) ) {
throw("You must provide a reference slice.");
}
# create an artificial coordinate system for the container slice
my $cs = Bio::EnsEMBL::CoordSystem->new(
-NAME => 'container',
-RANK => 1,
);
# Create a new artificial slice spanning your container. Initially this will
# simply span your reference slice
my $container_slice = Bio::EnsEMBL::Slice->new(
-COORD_SYSTEM => $cs,
-START => 1,
-END => $ref_slice->length,
-STRAND => 1,
-SEQ_REGION_NAME => 'container',
);
$self->{'container_slice'} = $container_slice;
# Create a Mapper to map between the reference slice and the container coord
# system.
my $mapper = Bio::EnsEMBL::Mapper->new('ref_slice', 'container');
$mapper->add_map_coordinates(
$ref_slice->seq_region_name,
$ref_slice->start,
$ref_slice->end,
1,
$container_slice->seq_region_name,
$container_slice->start,
$container_slice->end,
);
$self->{'mapper'} = $mapper;
}
=head2 set_adaptor
Arg[1] : String $type - the type of adaptor to set
Arg[2] : Adaptor $adaptor - the adaptor to set
Example : my $adaptor = Bio::EnsEMBL::DBSQL::AssemblySliceAdaptor->new;
$msc->set_adaptor('assembly', $adaptor);
Description : Parameterisable wrapper for all methods that set adaptors (see
below).
Return type : same as Arg 2
Exceptions : thrown on missing type
Caller : general
Status : At Risk
: under development
=cut
sub set_adaptor {
my $self = shift;
my $type = shift;
my $adaptor = shift;
# argument check
unless ($type and $adaptors{$type}) {
throw("Missing or unknown adaptor type.");
}
$type = ucfirst($type);
my $method = "set_${type}SliceAdaptor";
return $self->$method($adaptor);
}
=head2 get_adaptor
Arg[1] : String $type - the type of adaptor to get
Example : my $assembly_slice_adaptor = $msc->get_adaptor('assembly');
Description : Parameterisable wrapper for all methods that get adaptors (see
below).
Return type : An adaptor for the requested type of MappedSlice.
Exceptions : thrown on missing type
Caller : general
Status : At Risk
: under development
=cut
sub get_adaptor {
my $self = shift;
my $type = shift;
# argument check
unless ($type and $adaptors{$type}) {
throw("Missing or unknown adaptor type.");
}
$type = ucfirst($type);
my $method = "get_${type}SliceAdaptor";
return $self->$method;
}
=head2 set_AssemblySliceAdaptor
Arg[1] : Bio::EnsEMBL::DBSQL::AssemblySliceAdaptor - the adaptor to set
Example : my $adaptor = Bio::EnsEMBL::DBSQL::AssemblySliceAdaptor->new;
$msc->set_AssemblySliceAdaptor($adaptor);
Description : Sets an AssemblySliceAdaptor for this container. The adaptor can
be used to attach MappedSlices for alternative assemblies.
Return type : Bio::EnsEMBL::DBSQL::AssemblySliceAdaptor
Exceptions : thrown on wrong or missing argument
Caller : general, $self->get_adaptor
Status : At Risk
: under development
=cut
sub set_AssemblySliceAdaptor {
my $self = shift;
my $assembly_slice_adaptor = shift;
unless ($assembly_slice_adaptor and ref($assembly_slice_adaptor) and
$assembly_slice_adaptor->isa('Bio::EnsEMBL::DBSQL::AssemblySliceAdaptor')) {
throw("Need a Bio::EnsEMBL::AssemblySliceAdaptor.");
}
$self->{'adaptors'}->{'AssemblySlice'} = $assembly_slice_adaptor;
}
=head2 get_AssemblySliceAdaptor
Example : my $assembly_slice_adaptor = $msc->get_AssemblySliceAdaptor;
Description : Gets an AssemblySliceAdaptor from this container. The adaptor can
be used to attach MappedSlices for alternative assemblies.
Return type : Bio::EnsEMBL::DBSQL::AssemblySliceAdaptor
Exceptions : thrown on wrong or missing argument
Caller : general, $self->get_adaptor
Status : At Risk
: under development
=cut
sub get_AssemblySliceAdaptor {
my $self = shift;
unless ($self->{'adaptors'}->{'AssemblySlice'}) {
warning("No AssemblySliceAdaptor attached to MappedSliceContainer.");
}
return $self->{'adaptors'}->{'AssemblySlice'};
}
# [todo]
sub set_AlignSliceAdaptor {
throw("Not implemented yet!");
}
# [todo]
sub get_AlignSliceAdaptor {
throw("Not implemented yet!");
}
# [todo]
sub set_StrainSliceAdaptor {
my $self = shift;
my $strain_slice_adaptor = shift;
unless ($strain_slice_adaptor and ref($strain_slice_adaptor) and
$strain_slice_adaptor->isa('Bio::EnsEMBL::DBSQL::StrainSliceAdaptor')) {
throw("Need a Bio::EnsEMBL::StrainSliceAdaptor.");
}
$self->{'adaptors'}->{'StrainSlice'} = $strain_slice_adaptor;
}
# [todo]
sub get_StrainSliceAdaptor {
my $self = shift;
unless ($self->{'adaptors'}->{'StrainSlice'}) {
warning("No StrainSliceAdaptor attached to MappedSliceContainer.");
}
return $self->{'adaptors'}->{'StrainSlice'};
}
=head2 attach_AssemblySlice
Arg[1] : String $version - assembly version to attach
Example : $msc->attach_AssemblySlice('NCBIM36');
Description : Attaches a MappedSlice for an alternative assembly to this
container.
Return type : none
Exceptions : thrown on missing argument
Caller : general, Bio::EnsEMBL::DBSQL::AssemblySliceAdaptor
Status : At Risk
: under development
=cut
sub attach_AssemblySlice {
my $self = shift;
my $version = shift;
throw("Need a version.") unless ($version);
my $asa = $self->get_AssemblySliceAdaptor;
return unless ($asa);
my @mapped_slices = @{ $asa->fetch_by_version($self, $version) };
push @{ $self->{'mapped_slices'} }, @mapped_slices;
}
=head2 attach_StrainSlice
Arg[1] : String $strain - name of strain to attach
Example : $msc->attach_StrainSlice('Watson');
Description : Attaches a MappedSlice for an alternative strain to this
container.
Return type : none
Exceptions : thrown on missing argument
Caller : general, Bio::EnsEMBL::DBSQL::StrainSliceAdaptor
Status : At Risk
: under development
=cut
sub attach_StrainSlice {
my $self = shift;
my $strain = shift;
throw("Need a strain.") unless ($strain);
my $ssa = $self->get_StrainSliceAdaptor;
return unless ($ssa);
my @mapped_slices = @{ $ssa->fetch_by_name($self, $strain) };
push @{ $self->{'mapped_slices'} }, @mapped_slices;
}
=head2 get_all_MappedSlices
Example : foreach my $mapped_slice (@{ $msc->get_all_MappedSlices }) {
print $mapped_slice->name, "\n";
}
Description : Returns all MappedSlices attached to this container.
Return type : listref of Bio::EnsEMBL::MappedSlice
Exceptions : none
Caller : general
Status : At Risk
: under development
=cut
sub get_all_MappedSlices {
my $self = shift;
return $self->{'mapped_slices'};
}
# [todo]
sub sub_MappedSliceContainer {
throw("Not implemented yet!");
}
=head2 ref_slice
Arg[1] : (optional) Bio::EnsEMBL::Slice - the reference slice to set
Example : my $ref_slice = $mapped_slice_container->ref_slice;
print "This MappedSliceContainer is based on the reference
slice ", $ref_slice->name, "\n";
Description : Getter/setter for the reference slice.
Return type : Bio::EnsEMBL::Slice
Exceptions : thrown on wrong argument type
Caller : general
Status : At Risk
: under development
=cut
sub ref_slice {
my $self = shift;
if (@_) {
my $slice = shift;
unless (ref($slice) and ($slice->isa('Bio::EnsEMBL::Slice') or $slice->isa('Bio::EnsEMBL::LRGSlice'))) {
throw("Need a Bio::EnsEMBL::Slice.");
}
$self->{'ref_slice'} = $slice;
}
return $self->{'ref_slice'};
}
=head2 container_slice
Arg[1] : (optional) Bio::EnsEMBL::Slice - the container slice to set
Example : my $container_slice = $mapped_slice_container->container_slice;
print "The common slice used by this MappedSliceContainer is ",
$container_slice->name, "\n";
Description : Getter/setter for the container slice. This is an artificial
slice which defines the common coordinate system used by the
MappedSlices attached to this container.
Return type : Bio::EnsEMBL::Slice
Exceptions : thrown on wrong argument type
Caller : general
Status : At Risk
: under development
=cut
sub container_slice {
my $self = shift;
if (@_) {
my $slice = shift;
unless (ref($slice) and ($slice->isa('Bio::EnsEMBL::Slice') or $slice->isa('Bio::EnsEMBL::LRGSlice')) ) {
throw("Need a Bio::EnsEMBL::Slice.");
}
$self->{'container_slice'} = $slice;
}
return $self->{'container_slice'};
}
=head2 mapper
Arg[1] : (optional) Bio::EnsEMBL::Mapper - the mapper to set
Example : my $mapper = Bio::EnsEMBL::Mapper->new('ref', 'mapped');
$mapped_slice_container->mapper($mapper);
Description : Getter/setter for the mapper to map between reference slice and
the artificial container coord system.
Return type : Bio::EnsEMBL::Mapper
Exceptions : thrown on wrong argument type
Caller : internal, Bio::EnsEMBL::MappedSlice->AUTOLOAD
Status : At Risk
: under development
=cut
sub mapper {
my $self = shift;
if (@_) {
my $mapper = shift;
unless (ref($mapper) and $mapper->isa('Bio::EnsEMBL::Mapper')) {
throw("Need a Bio::EnsEMBL::Mapper.");
}
$self->{'mapper'} = $mapper;
}
return $self->{'mapper'};
}
=head2 expanded
Arg[1] : (optional) Boolean - expanded mode to set
Example : if ($mapped_slice_container->expanded) {
# do more elaborate mapping than in collapsed mode
[...]
}
Description : Getter/setter for expanded mode.
By default, MappedSliceContainer uses collapsed mode, which
means that no inserts in the reference sequence are allowed
when constructing the MappedSlices. In this mode, the
mapped_slice artificial coord system will be identical to the
ref_slice coord system.
By setting expanded mode, you allow inserts in the reference
sequence.
Return type : Boolean
Exceptions : none
Caller : general
Status : At Risk
: under development
=cut
sub expanded {
my $self = shift;
$self->{'expanded'} = shift if (@_);
return $self->{'expanded'};
}
=head2 seq
Example : my $seq = $container->seq()
Description : Retrieves the expanded sequence of the artificial container
slice, including "-" characters where there are inserts in any
of the attached mapped slices.
Return type : String
Exceptions : none
Caller : general
Status : At Risk
: under development
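A small sketch (illustrative only, given the development status) of
inspecting the gapped container sequence:

  my $container_seq = $msc->seq();
  printf "%s: %d columns (gaps shown as '-')\n",
      $msc->container_slice->name, length($container_seq);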
=cut
sub seq {
my $self = shift;
my $container_seq = '';
# check there's a mapper
if(defined($self->mapper)) {
my $start = 0;
my $slice = $self->ref_slice();
my $seq = $slice->seq();
foreach my $coord($self->mapper->map_coordinates($slice->seq_region_name, $slice->start, $slice->end, $slice->strand, 'ref_slice')) {
# if it is a normal coordinate insert sequence
if(!$coord->isa('Bio::EnsEMBL::Mapper::IndelCoordinate')) {
$container_seq .= substr($seq, $start, $coord->length());
$start += $coord->length;
}
# if it is a gap or indel insert "-"
else {
$container_seq .= '-' x $coord->length();
}
}
}
return $container_seq;
}
1;
| 27.769953 | 137 | 0.659622 |
edb91d35c463225ea8fb1f3597e8266f13fd59a7 | 64,768 | pm | Perl | git/usr/share/perl5/core_perl/Pod/Perldoc.pm | BrianShin/Drupal---Example | 2d49bfeeed97cc19fc59c60d1caca51aa664fb0e | [
"Apache-2.0"
] | 9 | 2018-04-19T05:08:30.000Z | 2021-11-23T07:36:58.000Z | git/usr/share/perl5/core_perl/Pod/Perldoc.pm | BrianShin/Drupal---Example | 2d49bfeeed97cc19fc59c60d1caca51aa664fb0e | [
"Apache-2.0"
] | 98 | 2017-11-02T19:00:44.000Z | 2022-03-22T16:15:39.000Z | git/usr/share/perl5/core_perl/Pod/Perldoc.pm | BrianShin/Drupal---Example | 2d49bfeeed97cc19fc59c60d1caca51aa664fb0e | [
"Apache-2.0"
] | 9 | 2017-10-24T21:53:36.000Z | 2021-11-23T07:36:59.000Z | use 5.006; # we use some open(X, "<", $y) syntax
package Pod::Perldoc;
use strict;
use warnings;
use Config '%Config';
use Fcntl; # for sysopen
use File::Basename qw(basename);
use File::Spec::Functions qw(catfile catdir splitdir);
use vars qw($VERSION @Pagers $Bindir $Pod2man
$Temp_Files_Created $Temp_File_Lifetime
);
$VERSION = '3.25_03'; # patched in perl5.git
$VERSION =~ s/_//;
#..........................................................................
BEGIN { # Make a DEBUG constant very first thing...
unless(defined &DEBUG) {
if(($ENV{'PERLDOCDEBUG'} || '') =~ m/^(\d+)/) { # untaint
eval("sub DEBUG () {$1}");
die "WHAT? Couldn't eval-up a DEBUG constant!? $@" if $@;
} else {
*DEBUG = sub () {0};
}
}
}
use Pod::Perldoc::GetOptsOO; # uses the DEBUG.
use Carp qw(croak carp);
# these are also in BaseTo, which I don't want to inherit
sub debugging {
my $self = shift;
( defined(&Pod::Perldoc::DEBUG) and &Pod::Perldoc::DEBUG() )
}
sub debug {
my( $self, @messages ) = @_;
return unless $self->debugging;
print STDERR map { "DEBUG : $_" } @messages;
}
sub warn {
my( $self, @messages ) = @_;
carp( join "\n", @messages, '' );
}
sub die {
my( $self, @messages ) = @_;
croak( join "\n", @messages, '' );
}
#..........................................................................
sub TRUE () {1}
sub FALSE () {return}
sub BE_LENIENT () {1}
BEGIN {
*is_vms = $^O eq 'VMS' ? \&TRUE : \&FALSE unless defined &is_vms;
*is_mswin32 = $^O eq 'MSWin32' ? \&TRUE : \&FALSE unless defined &is_mswin32;
*is_dos = $^O eq 'dos' ? \&TRUE : \&FALSE unless defined &is_dos;
*is_os2 = $^O eq 'os2' ? \&TRUE : \&FALSE unless defined &is_os2;
*is_cygwin = ($^O eq 'cygwin' || $^O eq 'msys') ? \&TRUE : \&FALSE unless defined &is_cygwin;
*is_linux = $^O eq 'linux' ? \&TRUE : \&FALSE unless defined &is_linux;
*is_hpux = $^O =~ m/hpux/ ? \&TRUE : \&FALSE unless defined &is_hpux;
*is_amigaos = $^O eq 'amigaos' ? \&TRUE : \&FALSE unless defined &is_amigaos;
}
$Temp_File_Lifetime ||= 60 * 60 * 24 * 5;
# If it's older than five days, it's quite unlikely
# that anyone's still looking at it!!
# (Currently used only by the MSWin cleanup routine)
#..........................................................................
{ my $pager = $Config{'pager'};
push @Pagers, $pager if -x (split /\s+/, $pager)[0] or __PACKAGE__->is_vms;
}
$Bindir = $Config{'scriptdirexp'};
$Pod2man = "pod2man" . ( $Config{'versiononly'} ? $Config{'version'} : '' );
# End of class-init stuff
#
###########################################################################
#
# Option accessors...
foreach my $subname (map "opt_$_", split '', q{mhlDriFfXqnTdULva}) {
no strict 'refs';
*$subname = do{ use strict 'refs'; sub () { shift->_elem($subname, @_) } };
}
# And these are so that GetOptsOO knows they take options:
sub opt_a_with { shift->_elem('opt_a', @_) }
sub opt_f_with { shift->_elem('opt_f', @_) }
sub opt_q_with { shift->_elem('opt_q', @_) }
sub opt_d_with { shift->_elem('opt_d', @_) }
sub opt_L_with { shift->_elem('opt_L', @_) }
sub opt_v_with { shift->_elem('opt_v', @_) }
sub opt_w_with { # Specify an option for the formatter subclass
my($self, $value) = @_;
if($value =~ m/^([-_a-zA-Z][-_a-zA-Z0-9]*)(?:[=\:](.*?))?$/s) {
my $option = $1;
my $option_value = defined($2) ? $2 : "TRUE";
$option =~ tr/\-/_/s; # tolerate "foo-bar" for "foo_bar"
$self->add_formatter_option( $option, $option_value );
} else {
$self->warn( qq("$value" isn't a good formatter option name. I'm ignoring it!\n ) );
}
return;
}
sub opt_M_with { # specify formatter class name(s)
my($self, $classes) = @_;
return unless defined $classes and length $classes;
DEBUG > 4 and print "Considering new formatter classes -M$classes\n";
my @classes_to_add;
foreach my $classname (split m/[,;]+/s, $classes) {
next unless $classname =~ m/\S/;
if( $classname =~ m/^(\w+(::\w+)+)$/s ) {
# A mildly restrictive concept of what module names are valid.
push @classes_to_add, $1; # untaint
} else {
$self->warn( qq("$classname" isn't a valid classname. Ignoring.\n) );
}
}
unshift @{ $self->{'formatter_classes'} }, @classes_to_add;
DEBUG > 3 and print(
"Adding @classes_to_add to the list of formatter classes, "
. "making them @{ $self->{'formatter_classes'} }.\n"
);
return;
}
sub opt_V { # report version and exit
print join '',
"Perldoc v$VERSION, under perl v$] for $^O",
(defined(&Win32::BuildNumber) and defined &Win32::BuildNumber())
? (" (win32 build ", &Win32::BuildNumber(), ")") : (),
(chr(65) eq 'A') ? () : " (non-ASCII)",
"\n",
;
exit;
}
sub opt_t { # choose plaintext as output format
my $self = shift;
$self->opt_o_with('text') if @_ and $_[0];
return $self->_elem('opt_t', @_);
}
sub opt_u { # choose raw pod as output format
my $self = shift;
$self->opt_o_with('pod') if @_ and $_[0];
return $self->_elem('opt_u', @_);
}
sub opt_n_with {
# choose man as the output format, and specify the proggy to run
my $self = shift;
$self->opt_o_with('man') if @_ and $_[0];
$self->_elem('opt_n', @_);
}
sub opt_o_with { # "o" for output format
my($self, $rest) = @_;
return unless defined $rest and length $rest;
if($rest =~ m/^(\w+)$/s) {
$rest = $1; #untaint
} else {
$self->warn( qq("$rest" isn't a valid output format. Skipping.\n") );
return;
}
$self->aside("Noting \"$rest\" as desired output format...\n");
# Figure out what class(es) that could actually mean...
my @classes;
foreach my $prefix ("Pod::Perldoc::To", "Pod::Simple::", "Pod::") {
# Messy but smart:
foreach my $stem (
$rest, # Yes, try it first with the given capitalization
"\L$rest", "\L\u$rest", "\U$rest" # And then try variations
) {
$self->aside("Considering $prefix$stem\n");
push @classes, $prefix . $stem;
}
# Tidier, but misses too much:
#push @classes, $prefix . ucfirst(lc($rest));
}
$self->opt_M_with( join ";", @classes );
return;
}
###########################################################################
# % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
sub run { # to be called by the "perldoc" executable
my $class = shift;
if(DEBUG > 3) {
print "Parameters to $class\->run:\n";
my @x = @_;
while(@x) {
$x[1] = '<undef>' unless defined $x[1];
$x[1] = "@{$x[1]}" if ref( $x[1] ) eq 'ARRAY';
print " [$x[0]] => [$x[1]]\n";
splice @x,0,2;
}
print "\n";
}
return $class -> new(@_) -> process() || 0;
}
# % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % % %
###########################################################################
sub new { # yeah, nothing fancy
my $class = shift;
my $new = bless {@_}, (ref($class) || $class);
DEBUG > 1 and print "New $class object $new\n";
$new->init();
$new;
}
#..........................................................................
sub aside { # If we're in -D or DEBUG mode, say this.
my $self = shift;
if( DEBUG or $self->opt_D ) {
my $out = join( '',
DEBUG ? do {
my $callsub = (caller(1))[3];
my $package = quotemeta(__PACKAGE__ . '::');
$callsub =~ s/^$package/'/os;
# the o is justified, as $package really won't change.
$callsub . ": ";
} : '',
@_,
);
if(DEBUG) { print $out } else { print STDERR $out }
}
return;
}
#..........................................................................
sub usage {
my $self = shift;
$self->warn( "@_\n" ) if @_;
# Erase evidence of previous errors (if any), so exit status is simple.
$! = 0;
CORE::die( <<EOF );
perldoc [options] PageName|ModuleName|ProgramName|URL...
perldoc [options] -f BuiltinFunction
perldoc [options] -q FAQRegex
perldoc [options] -v PerlVariable
Options:
-h Display this help message
-V Report version
-r Recursive search (slow)
-i Ignore case
-t Display pod using pod2text instead of Pod::Man and groff
(-t is the default on win32 unless -n is specified)
-u Display unformatted pod text
-m Display module's file in its entirety
-n Specify replacement for groff
-l Display the module's file name
-F Arguments are file names, not modules
-D Verbosely describe what's going on
-T Send output to STDOUT without any pager
-d output_filename_to_send_to
-o output_format_name
-M FormatterModuleNameToUse
-w formatter_option:option_value
-L translation_code Choose doc translation (if any)
-X Use index if present (looks for pod.idx at $Config{archlib})
-q Search the text of questions (not answers) in perlfaq[1-9]
-f Search Perl built-in functions
-a Search Perl API
-v Search predefined Perl variables
PageName|ModuleName|ProgramName|URL...
is the name of a piece of documentation that you want to look at. You
may either give a descriptive name of the page (as in the case of
`perlfunc') the name of a module, either like `Term::Info' or like
`Term/Info', or the name of a program, like `perldoc', or a URL
starting with http(s).
BuiltinFunction
is the name of a perl function. Will extract documentation from
`perlfunc' or `perlop'.
FAQRegex
is a regex. Will search perlfaq[1-9] for and extract any
questions that match.
Any switches in the PERLDOC environment variable will be used before the
command line arguments. The optional pod index file contains a list of
filenames, one per line.
[Perldoc v$VERSION]
EOF
}
#..........................................................................
sub program_name {
my( $self ) = @_;
if( my $link = readlink( $0 ) ) {
$self->debug( "The value in $0 is a symbolic link to $link\n" );
}
my $basename = basename( $0 );
$self->debug( "\$0 is [$0]\nbasename is [$basename]\n" );
# possible name forms
# perldoc
# perldoc-v5.14
# perldoc-5.14
# perldoc-5.14.2
# perlvar # an alias mentioned in Camel 3
{
my( $untainted ) = $basename =~ m/(
\A
perl
(?: doc | func | faq | help | op | toc | var # Camel 3
)
(?: -? v? \d+ \. \d+ (?:\. \d+)? )? # possible version
(?: \. (?: bat | exe | com ) )? # possible extension
\z
)
/x;
$self->debug($untainted);
return $untainted if $untainted;
}
$self->warn(<<"HERE");
You called the perldoc command with a name that I didn't recognize.
This might mean that someone is tricking you into running a
program you don't intend to use, but it also might mean that you
created your own link to perldoc. I think your program name is
[$basename].
I'll allow this if the filename only has [a-zA-Z0-9._-].
HERE
{
my( $untainted ) = $basename =~ m/(
\A [a-zA-Z0-9._-]+ \z
)/x;
$self->debug($untainted);
return $untainted if $untainted;
}
$self->die(<<"HERE");
I think that your name for perldoc is potentially unsafe, so I'm
going to disallow it. I'd rather you be safe than sorry. If you
intended to use the name I'm disallowing, please tell the maintainers
about it. Write to:
Pod-Perldoc\@rt.cpan.org
HERE
}
#..........................................................................
sub usage_brief {
my $self = shift;
my $program_name = $self->program_name;
CORE::die( <<"EOUSAGE" );
Usage: $program_name [-hVriDtumFXlT] [-n nroffer_program]
[-d output_filename] [-o output_format] [-M FormatterModule]
[-w formatter_option:option_value] [-L translation_code]
PageName|ModuleName|ProgramName
Examples:
$program_name -f PerlFunc
$program_name -q FAQKeywords
$program_name -v PerlVar
$program_name -a PerlAPI
The -h option prints more help. Also try "$program_name perldoc" to get
acquainted with the system. [Perldoc v$VERSION]
EOUSAGE
}
#..........................................................................
sub pagers { @{ shift->{'pagers'} } }
#..........................................................................
sub _elem { # handy scalar meta-accessor: shift->_elem("foo", @_)
if(@_ > 2) { return $_[0]{ $_[1] } = $_[2] }
else { return $_[0]{ $_[1] } }
}
#..........................................................................
###########################################################################
#
# Init formatter switches, and start it off with __bindir and all that
# other stuff that ToMan.pm needs.
#
sub init {
my $self = shift;
# Make sure creat()s are neither too much nor too little
eval { umask(0077) }; # doubtless someone has no mask
if ( $] < 5.008 ) {
$self->aside("Your old perl doesn't have proper unicode support.");
}
else {
# http://www.perl.com/pub/2012/04/perlunicookbook-decode-argv-as-utf8.html
# Decode command line arguments as UTF-8. See RT#98906 for example problem.
use Encode qw(decode_utf8);
@ARGV = map { decode_utf8($_, 1) } @ARGV;
}
$self->{'args'} ||= \@ARGV;
$self->{'found'} ||= [];
$self->{'temp_file_list'} ||= [];
$self->{'target'} = undef;
$self->init_formatter_class_list;
$self->{'pagers' } = [@Pagers] unless exists $self->{'pagers'};
$self->{'bindir' } = $Bindir unless exists $self->{'bindir'};
$self->{'pod2man'} = $Pod2man unless exists $self->{'pod2man'};
$self->{'search_path'} = [ ] unless exists $self->{'search_path'};
push @{ $self->{'formatter_switches'} = [] }, (
# Yeah, we could use a hashref, but maybe there's some class where options
# have to be ordered; so we'll use an arrayref.
[ '__bindir' => $self->{'bindir' } ],
[ '__pod2man' => $self->{'pod2man'} ],
);
DEBUG > 3 and printf "Formatter switches now: [%s]\n",
join ' ', map "[@$_]", @{ $self->{'formatter_switches'} };
$self->{'translators'} = [];
$self->{'extra_search_dirs'} = [];
return;
}
#..........................................................................
sub init_formatter_class_list {
my $self = shift;
$self->{'formatter_classes'} ||= [];
# Remember, no switches have been read yet, when
# we've started this routine.
$self->opt_M_with('Pod::Perldoc::ToPod'); # the always-there fallthru
$self->opt_o_with('text');
$self->opt_o_with('term') unless $self->is_mswin32 || $self->is_dos || $self->is_amigaos
|| !($ENV{TERM} && (
($ENV{TERM} || '') !~ /dumb|emacs|none|unknown/i
));
return;
}
#..........................................................................
sub process {
# if this ever returns, its retval will be used for exit(RETVAL)
my $self = shift;
DEBUG > 1 and print " Beginning process.\n";
DEBUG > 1 and print " Args: @{$self->{'args'}}\n\n";
if(DEBUG > 3) {
print "Object contents:\n";
my @x = %$self;
while(@x) {
$x[1] = '<undef>' unless defined $x[1];
$x[1] = "@{$x[1]}" if ref( $x[1] ) eq 'ARRAY';
print " [$x[0]] => [$x[1]]\n";
splice @x,0,2;
}
print "\n";
}
# TODO: make it deal with being invoked as various different things
# such as "perlfaq".
return $self->usage_brief unless @{ $self->{'args'} };
$self->options_reading;
$self->pagers_guessing;
$self->aside(sprintf "$0 => %s v%s\n", ref($self), $self->VERSION);
$self->drop_privs_maybe unless $self->opt_U;
$self->options_processing;
# Hm, we have @pages and @found, but we only really act on one
# file per call, with the exception of the opt_q hack, and with
# -l things
$self->aside("\n");
my @pages;
$self->{'pages'} = \@pages;
if( $self->opt_f) { @pages = qw(perlfunc perlop) }
elsif( $self->opt_q) { @pages = ("perlfaq1" .. "perlfaq9") }
elsif( $self->opt_v) { @pages = ("perlvar") }
elsif( $self->opt_a) { @pages = ("perlapi") }
else { @pages = @{$self->{'args'}};
# @pages = __FILE__
# if @pages == 1 and $pages[0] eq 'perldoc';
}
return $self->usage_brief unless @pages;
$self->find_good_formatter_class();
$self->formatter_sanity_check();
$self->maybe_extend_searchpath();
# for when we're apparently in a module or extension directory
my @found = $self->grand_search_init(\@pages);
exit ($self->is_vms ? 98962 : 1) unless @found;
if ($self->opt_l and not $self->opt_q ) {
DEBUG and print "We're in -l mode, so byebye after this:\n";
print join("\n", @found), "\n";
return;
}
$self->tweak_found_pathnames(\@found);
$self->assert_closing_stdout;
return $self->page_module_file(@found) if $self->opt_m;
DEBUG > 2 and print "Found: [@found]\n";
return $self->render_and_page(\@found);
}
#..........................................................................
{
my( %class_seen, %class_loaded );
sub find_good_formatter_class {
my $self = $_[0];
my @class_list = @{ $self->{'formatter_classes'} || [] };
$self->die( "WHAT? Nothing in the formatter class list!?" ) unless @class_list;
local @INC = @INC;
pop @INC if $INC[-1] eq '.';
my $good_class_found;
foreach my $c (@class_list) {
DEBUG > 4 and print "Trying to load $c...\n";
if($class_loaded{$c}) {
DEBUG > 4 and print "OK, the already-loaded $c it is!\n";
$good_class_found = $c;
last;
}
if($class_seen{$c}) {
DEBUG > 4 and print
"I've tried $c before, and it's no good. Skipping.\n";
next;
}
$class_seen{$c} = 1;
if( $c->can('parse_from_file') ) {
DEBUG > 4 and print
"Interesting, the formatter class $c is already loaded!\n";
} elsif(
( $self->is_os2 or $self->is_mswin32 or $self->is_dos )
# the always case-insensitive filesystems
and $class_seen{lc("~$c")}++
) {
DEBUG > 4 and print
"We already used something quite like \"\L$c\E\", so no point using $c\n";
# This avoids redefining the package.
} else {
DEBUG > 4 and print "Trying to eval 'require $c'...\n";
local $^W = $^W;
if(DEBUG() or $self->opt_D) {
# feh, let 'em see it
} else {
$^W = 0;
# The average user just has no reason to be seeing
# $^W-suppressible warnings from the require!
}
eval "require $c";
if($@) {
DEBUG > 4 and print "Couldn't load $c: $!\n";
next;
}
}
if( $c->can('parse_from_file') ) {
DEBUG > 4 and print "Settling on $c\n";
my $v = $c->VERSION;
$v = ( defined $v and length $v ) ? " version $v" : '';
$self->aside("Formatter class $c$v successfully loaded!\n");
$good_class_found = $c;
last;
} else {
DEBUG > 4 and print "Class $c isn't a formatter?! Skipping.\n";
}
}
$self->die( "Can't find any loadable formatter class in @class_list?!\nAborting" )
unless $good_class_found;
$self->{'formatter_class'} = $good_class_found;
$self->aside("Will format with the class $good_class_found\n");
return;
}
}
#..........................................................................
sub formatter_sanity_check {
my $self = shift;
my $formatter_class = $self->{'formatter_class'}
|| $self->die( "NO FORMATTER CLASS YET!?" );
if(!$self->opt_T # so -T can FORCE sending to STDOUT
and $formatter_class->can('is_pageable')
and !$formatter_class->is_pageable
and !$formatter_class->can('page_for_perldoc')
) {
my $ext =
($formatter_class->can('output_extension')
&& $formatter_class->output_extension
) || '';
$ext = ".$ext" if length $ext;
my $me = $self->program_name;
$self->die(
"When using Perldoc to format with $formatter_class, you have to\n"
. "specify -T or -dsomefile$ext\n"
. "See `$me perldoc' for more information on those switches.\n" )
;
}
}
#..........................................................................
sub render_and_page {
my($self, $found_list) = @_;
$self->maybe_generate_dynamic_pod($found_list);
my($out, $formatter) = $self->render_findings($found_list);
if($self->opt_d) {
printf "Perldoc (%s) output saved to %s\n",
$self->{'formatter_class'} || ref($self),
$out;
print "But notice that it's 0 bytes long!\n" unless -s $out;
} elsif( # Allow the formatter to "page" itself, if it wants.
$formatter->can('page_for_perldoc')
and do {
$self->aside("Going to call $formatter\->page_for_perldoc(\"$out\")\n");
if( $formatter->page_for_perldoc($out, $self) ) {
$self->aside("page_for_perldoc returned true, so NOT paging with $self.\n");
1;
} else {
$self->aside("page_for_perldoc returned false, so paging with $self instead.\n");
'';
}
}
) {
# Do nothing, since the formatter has "paged" it for itself.
} else {
# Page it normally (internally)
if( -s $out ) { # Usual case:
$self->page($out, $self->{'output_to_stdout'}, $self->pagers);
} else {
# Odd case:
$self->aside("Skipping $out (from $$found_list[0] "
. "via $$self{'formatter_class'}) as it is 0-length.\n");
push @{ $self->{'temp_file_list'} }, $out;
$self->unlink_if_temp_file($out);
}
}
$self->after_rendering(); # any extra cleanup or whatever
return;
}
#..........................................................................
sub options_reading {
my $self = shift;
if( defined $ENV{"PERLDOC"} and length $ENV{"PERLDOC"} ) {
require Text::ParseWords;
$self->aside("Noting env PERLDOC setting of $ENV{'PERLDOC'}\n");
# Yes, appends to the beginning
unshift @{ $self->{'args'} },
Text::ParseWords::shellwords( $ENV{"PERLDOC"} )
;
DEBUG > 1 and print " Args now: @{$self->{'args'}}\n\n";
} else {
DEBUG > 1 and print " Okay, no PERLDOC setting in ENV.\n";
}
DEBUG > 1
and print " Args right before switch processing: @{$self->{'args'}}\n";
Pod::Perldoc::GetOptsOO::getopts( $self, $self->{'args'}, 'YES' )
or return $self->usage;
DEBUG > 1
and print " Args after switch processing: @{$self->{'args'}}\n";
return $self->usage if $self->opt_h;
return;
}
#..........................................................................
sub options_processing {
my $self = shift;
if ($self->opt_X) {
my $podidx = "$Config{'archlib'}/pod.idx";
$podidx = "" unless -f $podidx && -r _ && -M _ <= 7;
$self->{'podidx'} = $podidx;
}
$self->{'output_to_stdout'} = 1 if $self->opt_T or ! -t STDOUT;
$self->options_sanity;
# This used to set a default, but that's now moved into any
# formatter that cares to have a default.
if( $self->opt_n ) {
$self->add_formatter_option( '__nroffer' => $self->opt_n );
}
# Get language from PERLDOC_POD2 environment variable
if ( ! $self->opt_L && $ENV{PERLDOC_POD2} ) {
if ( $ENV{PERLDOC_POD2} eq '1' ) {
$self->_elem('opt_L',(split(/\_/, $ENV{LC_ALL} || $ENV{LC_LANG} || $ENV{LANG}))[0] );
}
else {
$self->_elem('opt_L', $ENV{PERLDOC_POD2});
}
};
# Adjust for using translation packages
$self->add_translator(split(/\s+/,$self->opt_L)) if $self->opt_L;
return;
}
#..........................................................................
sub options_sanity {
my $self = shift;
# The opts-counting stuff interacts quite badly with
# the $ENV{"PERLDOC"} stuff. I.e., if I have $ENV{"PERLDOC"}
# set to -t, and I specify -u on the command line, I don't want
# to be hectored at that -u and -t don't make sense together.
#my $opts = grep $_ && 1, # yes, the count of the set ones
# $self->opt_t, $self->opt_u, $self->opt_m, $self->opt_l
#;
#
#$self->usage("only one of -t, -u, -m or -l") if $opts > 1;
# Any sanity-checking need doing here?
# But it does not make sense to set either -f or -q in $ENV{"PERLDOC"}
if( $self->opt_f or $self->opt_q or $self->opt_a) {
my $count;
$count++ if $self->opt_f;
$count++ if $self->opt_q;
$count++ if $self->opt_a;
$self->usage("Only one of -f or -q or -a") if $count > 1;
$self->warn(
"Perldoc is meant for reading one file at a time.\n",
"So these parameters are being ignored: ",
join(' ', @{$self->{'args'}}),
"\n" )
if @{$self->{'args'}}
}
return;
}
#..........................................................................
sub grand_search_init {
my($self, $pages, @found) = @_;
foreach (@$pages) {
if (/^http(s)?:\/\//) {
require HTTP::Tiny;
require File::Temp;
my $response = HTTP::Tiny->new->get($_);
if ($response->{success}) {
my ($fh, $filename) = File::Temp::tempfile(UNLINK => 1);
$fh->print($response->{content});
push @found, $filename;
($self->{podnames}{$filename} =
m{.*/([^/#?]+)} ? uc $1 : "UNKNOWN")
=~ s/\.P(?:[ML]|OD)\z//;
}
else {
print STDERR "No " .
($self->opt_m ? "module" : "documentation") . " found for \"$_\".\n";
}
next;
}
if ($self->{'podidx'} && open(PODIDX, $self->{'podidx'})) {
my $searchfor = catfile split '::', $_;
$self->aside( "Searching for '$searchfor' in $self->{'podidx'}\n" );
local $_;
while (<PODIDX>) {
chomp;
push(@found, $_) if m,/$searchfor(?:\.(?:pod|pm))?\z,i;
}
close(PODIDX) or $self->die( "Can't close $$self{'podidx'}: $!" );
next;
}
$self->aside( "Searching for $_\n" );
if ($self->opt_F) {
next unless -r;
push @found, $_ if $self->opt_l or $self->opt_m or $self->containspod($_);
next;
}
my @searchdirs;
# prepend extra search directories (including language specific)
push @searchdirs, @{ $self->{'extra_search_dirs'} };
# We must look both in @INC for library modules and in $bindir
# for executables, like h2xs or perldoc itself.
push @searchdirs, ($self->{'bindir'}, @{$self->{search_path}}, @INC);
unless ($self->opt_m) {
if ($self->is_vms) {
my($i,$trn);
for ($i = 0; $trn = $ENV{'DCL$PATH;'.$i}; $i++) {
push(@searchdirs,$trn);
}
push(@searchdirs,'perl_root:[lib.pods]') # installed pods
}
else {
push(@searchdirs, grep(-d, split($Config{path_sep},
$ENV{'PATH'})));
}
}
my @files = $self->searchfor(0,$_,@searchdirs);
if (@files) {
$self->aside( "Found as @files\n" );
}
# add "perl" prefix, so "perldoc foo" may find perlfoo.pod
elsif (BE_LENIENT and !/\W/ and @files = $self->searchfor(0, "perl$_", @searchdirs)) {
$self->aside( "Loosely found as @files\n" );
}
else {
# no match, try recursive search
@searchdirs = grep(!/^\.\z/s,@INC);
@files= $self->searchfor(1,$_,@searchdirs) if $self->opt_r;
if (@files) {
$self->aside( "Loosely found as @files\n" );
}
else {
print STDERR "No " .
($self->opt_m ? "module" : "documentation") . " found for \"$_\".\n";
if ( @{ $self->{'found'} } ) {
print STDERR "However, try\n";
my $me = $self->program_name;
for my $dir (@{ $self->{'found'} }) {
opendir(DIR, $dir) or $self->die( "opendir $dir: $!" );
while (my $file = readdir(DIR)) {
next if ($file =~ /^\./s);
$file =~ s/\.(pm|pod)\z//; # XXX: badfs
print STDERR "\t$me $_\::$file\n";
}
closedir(DIR) or $self->die( "closedir $dir: $!" );
}
}
}
}
push(@found,@files);
}
return @found;
}
#..........................................................................
sub maybe_generate_dynamic_pod {
my($self, $found_things) = @_;
my @dynamic_pod;
$self->search_perlapi($found_things, \@dynamic_pod) if $self->opt_a;
$self->search_perlfunc($found_things, \@dynamic_pod) if $self->opt_f;
$self->search_perlvar($found_things, \@dynamic_pod) if $self->opt_v;
$self->search_perlfaqs($found_things, \@dynamic_pod) if $self->opt_q;
if( ! $self->opt_f and ! $self->opt_q and ! $self->opt_v and ! $self->opt_a) {
DEBUG > 4 and print "That's a non-dynamic pod search.\n";
} elsif ( @dynamic_pod ) {
$self->aside("Hm, I found some Pod from that search!\n");
my ($buffd, $buffer) = $self->new_tempfile('pod', 'dyn');
if ( $] >= 5.008 && $self->opt_L ) {
binmode($buffd, ":encoding(UTF-8)");
print $buffd "=encoding utf8\n\n";
}
push @{ $self->{'temp_file_list'} }, $buffer;
# I.e., it MIGHT be deleted at the end.
my $in_list = !$self->not_dynamic && $self->opt_f || $self->opt_v || $self->opt_a;
print $buffd "=over 8\n\n" if $in_list;
print $buffd @dynamic_pod or $self->die( "Can't print $buffer: $!" );
print $buffd "=back\n" if $in_list;
close $buffd or $self->die( "Can't close $buffer: $!" );
@$found_things = $buffer;
# Yes, so found_things never has more than one thing in
# it, by time we leave here
$self->add_formatter_option('__filter_nroff' => 1);
} else {
@$found_things = ();
$self->aside("I found no Pod from that search!\n");
}
return;
}
#..........................................................................
sub not_dynamic {
my ($self,$value) = @_;
$self->{__not_dynamic} = $value if @_ == 2;
return $self->{__not_dynamic};
}
#..........................................................................
sub add_formatter_option { # $self->add_formatter_option('key' => 'value');
my $self = shift;
push @{ $self->{'formatter_switches'} }, [ @_ ] if @_;
DEBUG > 3 and printf "Formatter switches now: [%s]\n",
join ' ', map "[@$_]", @{ $self->{'formatter_switches'} };
return;
}
#.........................................................................
sub new_translator { # $tr = $self->new_translator($lang);
my $self = shift;
my $lang = shift;
local @INC = @INC;
pop @INC if $INC[-1] eq '.';
my $pack = 'POD2::' . uc($lang);
eval "require $pack";
if ( !$@ && $pack->can('new') ) {
return $pack->new();
}
eval { require POD2::Base };
return if $@;
return POD2::Base->new({ lang => $lang });
}
#.........................................................................
sub add_translator { # $self->add_translator($lang);
my $self = shift;
for my $lang (@_) {
my $tr = $self->new_translator($lang);
if ( defined $tr ) {
push @{ $self->{'translators'} }, $tr;
push @{ $self->{'extra_search_dirs'} }, $tr->pod_dirs;
$self->aside( "translator for '$lang' loaded\n" );
} else {
# non-installed or bad translator package
$self->warn( "Perldoc cannot load translator package for '$lang': ignored\n" );
}
}
return;
}
#..........................................................................
sub open_fh {
my ($self, $op, $path) = @_;
open my $fh, $op, $path or $self->die("Couldn't open $path: $!");
return $fh;
}
sub set_encoding {
my ($self, $fh, $encoding) = @_;
if ( $encoding =~ /utf-?8/i ) {
$encoding = ":encoding(UTF-8)";
}
else {
$encoding = ":encoding($encoding)";
}
if ( $] < 5.008 ) {
$self->aside("Your old perl doesn't have proper unicode support.");
}
else {
binmode($fh, $encoding);
}
return $fh;
}
sub search_perlvar {
my($self, $found_things, $pod) = @_;
my $opt = $self->opt_v;
if ( $opt !~ /^ (?: [\@\%\$]\S+ | [A-Z]\w* ) $/x ) {
CORE::die( "'$opt' does not look like a Perl variable\n" );
}
DEBUG > 2 and print "Search: @$found_things\n";
my $perlvar = shift @$found_things;
my $fh = $self->open_fh("<", $perlvar);
if ( $opt ne '$0' && $opt =~ /^\$\d+$/ ) { # handle $1, $2, ...
$opt = '$<I<digits>>';
}
my $search_re = quotemeta($opt);
DEBUG > 2 and
print "Going to perlvar-scan for $search_re in $perlvar\n";
# Skip introduction
local $_;
my $enc;
while (<$fh>) {
$enc = $1 if /^=encoding\s+(\S+)/;
last if /^=over 8/;
}
$fh = $self->set_encoding($fh, $enc) if $enc;
# Look for our variable
my $found = 0;
my $inheader = 1;
my $inlist = 0;
while (<$fh>) {
last if /^=head2 Error Indicators/;
# \b at the end of $` and friends borks things!
if ( m/^=item\s+$search_re\s/ ) {
$found = 1;
}
elsif (/^=item/) {
last if $found && !$inheader && !$inlist;
}
elsif (!/^\s+$/) { # not a blank line
if ( $found ) {
$inheader = 0; # don't accept more =item (unless inlist)
}
else {
@$pod = (); # reset
$inheader = 1; # start over
next;
}
}
if (/^=over/) {
++$inlist;
}
elsif (/^=back/) {
last if $found && !$inheader && !$inlist;
--$inlist;
}
push @$pod, $_;
# ++$found if /^\w/; # found descriptive text
}
@$pod = () unless $found;
if (!@$pod) {
CORE::die( "No documentation for perl variable '$opt' found\n" );
}
close $fh or $self->die( "Can't close $perlvar: $!" );
return;
}
#..........................................................................
sub search_perlop {
my ($self,$found_things,$pod) = @_;
$self->not_dynamic( 1 );
my $perlop = shift @$found_things;
# XXX FIXME: getting filehandles should probably be done in a single place
# especially since we need to support UTF8 or other encoding when dealing
# with perlop, perlfunc, perlapi, perlfaq[1-9]
my $fh = $self->open_fh('<', $perlop);
my $thing = $self->opt_f;
my $previous_line;
my $push = 0;
my $seen_item = 0;
my $skip = 1;
while( my $line = <$fh> ) {
$line =~ /^=encoding\s+(\S+)/ && $self->set_encoding($fh, $1);
# only start search after we hit the operator section
if ($line =~ m!^X<operator, regexp>!) {
$skip = 0;
}
next if $skip;
# strategy is to capture the previous line until we get a match on X<$thingy>
# if the current line contains X<$thingy>, then we push "=over", the previous line,
# the current line and keep pushing current line until we see a ^X<some-other-thing>,
# then we chop off final line from @$pod and add =back
#
# At that point, Bob's your uncle.
if ( $line =~ m!X<+\s*\Q$thing\E\s*>+!) {
if ( $previous_line ) {
push @$pod, "=over 8\n\n", $previous_line;
$previous_line = "";
}
push @$pod, $line;
$push = 1;
}
elsif ( $push and $line =~ m!^=item\s*.*$! ) {
$seen_item = 1;
}
elsif ( $push and $seen_item and $line =~ m!^X<+\s*[ a-z,?-]+\s*>+!) {
$push = 0;
$seen_item = 0;
last;
}
elsif ( $push ) {
push @$pod, $line;
}
else {
$previous_line = $line;
}
} #end while
# we overfilled by 1 line, so pop off final array element if we have any
if ( scalar @$pod ) {
pop @$pod;
# and add the =back
push @$pod, "\n\n=back\n";
DEBUG > 8 and print "PERLOP POD --->" . (join "", @$pod) . "<---\n";
}
else {
DEBUG > 4 and print "No pod from perlop\n";
}
close $fh;
return;
}
#..........................................................................
sub search_perlapi {
my($self, $found_things, $pod) = @_;
DEBUG > 2 and print "Search: @$found_things\n";
my $perlapi = shift @$found_things;
my $fh = $self->open_fh('<', $perlapi);
my $search_re = quotemeta($self->opt_a);
DEBUG > 2 and
print "Going to perlapi-scan for $search_re in $perlapi\n";
local $_;
# Look for our function
my $found = 0;
my $inlist = 0;
my @related;
my $related_re;
while (<$fh>) {
/^=encoding\s+(\S+)/ && $self->set_encoding($fh, $1);
if ( m/^=item\s+$search_re\b/ ) {
$found = 1;
}
elsif (@related > 1 and /^=item/) {
$related_re ||= join "|", @related;
if (m/^=item\s+(?:$related_re)\b/) {
$found = 1;
}
else {
last;
}
}
elsif (/^=item/) {
last if $found > 1 and not $inlist;
}
elsif ($found and /^X<[^>]+>/) {
push @related, m/X<([^>]+)>/g;
}
next unless $found;
if (/^=over/) {
++$inlist;
}
elsif (/^=back/) {
last if $found > 1 and not $inlist;
--$inlist;
}
push @$pod, $_;
++$found if /^\w/; # found descriptive text
}
if (!@$pod) {
CORE::die( sprintf
"No documentation for perl api function '%s' found\n",
$self->opt_a )
;
}
close $fh or $self->die( "Can't close $perlapi: $!" );
return;
}
#..........................................................................
sub search_perlfunc {
my($self, $found_things, $pod) = @_;
DEBUG > 2 and print "Search: @$found_things\n";
my $pfunc = shift @$found_things;
my $fh = $self->open_fh("<", $pfunc); # "Funk is its own reward"
# Functions like -r, -e, etc. are listed under `-X'.
my $search_re = ($self->opt_f =~ /^-[rwxoRWXOeszfdlpSbctugkTBMAC]$/)
? '(?:I<)?-X' : quotemeta($self->opt_f) ;
DEBUG > 2 and
print "Going to perlfunc-scan for $search_re in $pfunc\n";
my $re = 'Alphabetical Listing of Perl Functions';
# Check available translator or backup to default (english)
if ( $self->opt_L && defined $self->{'translators'}->[0] ) {
my $tr = $self->{'translators'}->[0];
$re = $tr->search_perlfunc_re if $tr->can('search_perlfunc_re');
if ( $] < 5.008 ) {
$self->aside("Your old perl doesn't really have proper unicode support.");
}
}
# Skip introduction
local $_;
while (<$fh>) {
/^=encoding\s+(\S+)/ && $self->set_encoding($fh, $1);
last if /^=head2 $re/;
}
# Look for our function
my $found = 0;
my $inlist = 0;
my @perlops = qw(m q qq qr qx qw s tr y);
my @related;
my $related_re;
while (<$fh>) { # "The Mothership Connection is here!"
last if( grep{ $self->opt_f eq $_ }@perlops );
if ( /^=over/ and not $found ) {
++$inlist;
}
elsif ( /^=back/ and not $found and $inlist ) {
--$inlist;
}
if ( m/^=item\s+$search_re\b/ and $inlist < 2 ) {
$found = 1;
}
elsif (@related > 1 and /^=item/) {
$related_re ||= join "|", @related;
if (m/^=item\s+(?:$related_re)\b/) {
$found = 1;
}
else {
last if $found > 1 and $inlist < 2;
}
}
elsif (/^=item/) {
last if $found > 1 and $inlist < 2;
}
elsif ($found and /^X<[^>]+>/) {
push @related, m/X<([^>]+)>/g;
}
next unless $found;
if (/^=over/) {
++$inlist;
}
elsif (/^=back/) {
--$inlist;
}
push @$pod, $_;
++$found if /^\w/; # found descriptive text
}
if( !@$pod ){
$self->search_perlop( $found_things, $pod );
}
if (!@$pod) {
CORE::die( sprintf
"No documentation for perl function '%s' found\n",
$self->opt_f )
;
}
close $fh or $self->die( "Can't close $pfunc: $!" );
return;
}
#..........................................................................
sub search_perlfaqs {
my( $self, $found_things, $pod) = @_;
my $found = 0;
my %found_in;
my $search_key = $self->opt_q;
my $rx = eval { qr/$search_key/ }
or $self->die( <<EOD );
Invalid regular expression '$search_key' given as -q pattern:
$@
Did you mean \\Q$search_key ?
EOD
local $_;
foreach my $file (@$found_things) {
$self->die( "invalid file spec: $!" ) if $file =~ /[<>|]/;
my $fh = $self->open_fh("<", $file);
while (<$fh>) {
/^=encoding\s+(\S+)/ && $self->set_encoding($fh, $1);
if ( m/^=head2\s+.*(?:$search_key)/i ) {
$found = 1;
push @$pod, "=head1 Found in $file\n\n" unless $found_in{$file}++;
}
elsif (/^=head[12]/) {
$found = 0;
}
next unless $found;
push @$pod, $_;
}
close($fh);
}
CORE::die("No documentation for perl FAQ keyword '$search_key' found\n")
unless @$pod;
if ( $self->opt_l ) {
CORE::die((join "\n", keys %found_in) . "\n");
}
return;
}
#..........................................................................
sub render_findings {
# Return the filename to open
my($self, $found_things) = @_;
my $formatter_class = $self->{'formatter_class'}
|| $self->die( "No formatter class set!?" );
my $formatter = $formatter_class->can('new')
? $formatter_class->new
: $formatter_class
;
if(! @$found_things) {
$self->die( "Nothing found?!" );
# should have been caught before here
} elsif(@$found_things > 1) {
$self->warn(
"Perldoc is only really meant for reading one document at a time.\n",
"So these parameters are being ignored: ",
join(' ', @$found_things[1 .. $#$found_things] ),
"\n" );
}
my $file = $found_things->[0];
DEBUG > 3 and printf "Formatter switches now: [%s]\n",
join ' ', map "[@$_]", @{ $self->{'formatter_switches'} };
# Set formatter options:
if( ref $formatter ) {
foreach my $f (@{ $self->{'formatter_switches'} || [] }) {
my($switch, $value, $silent_fail) = @$f;
if( $formatter->can($switch) ) {
eval { $formatter->$switch( defined($value) ? $value : () ) };
$self->warn( "Got an error when setting $formatter_class\->$switch:\n$@\n" )
if $@;
} else {
if( $silent_fail or $switch =~ m/^__/s ) {
DEBUG > 2 and print "Formatter $formatter_class doesn't support $switch\n";
} else {
$self->warn( "$formatter_class doesn't recognize the $switch switch.\n" );
}
}
}
}
$self->{'output_is_binary'} =
$formatter->can('write_with_binmode') && $formatter->write_with_binmode;
if( $self->{podnames} and exists $self->{podnames}{$file} and
$formatter->can('name') ) {
$formatter->name($self->{podnames}{$file});
}
my ($out_fh, $out) = $self->new_output_file(
( $formatter->can('output_extension') && $formatter->output_extension )
|| undef,
$self->useful_filename_bit,
);
# Now, finally, do the formatting!
{
local $^W = $^W;
if(DEBUG() or $self->opt_D) {
# feh, let 'em see it
} else {
$^W = 0;
# The average user just has no reason to be seeing
# $^W-suppressible warnings from the formatting!
}
eval { $formatter->parse_from_file( $file, $out_fh ) };
}
$self->warn( "Error while formatting with $formatter_class:\n $@\n" ) if $@;
DEBUG > 2 and print "Back from formatting with $formatter_class\n";
close $out_fh
or $self->warn( "Can't close $out: $!\n(Did $formatter already close it?)" );
sleep 0; sleep 0; sleep 0;
# Give the system a few timeslices to meditate on the fact
# that the output file does in fact exist and is closed.
$self->unlink_if_temp_file($file);
unless( -s $out ) {
if( $formatter->can( 'if_zero_length' ) ) {
# Basically this is just a hook for Pod::Simple::Checker; since
# what other class could /happily/ format an input file with Pod
# as a 0-length output file?
$formatter->if_zero_length( $file, $out, $out_fh );
} else {
$self->warn( "Got a 0-length file from $$found_things[0] via $formatter_class!?\n" );
}
}
DEBUG and print "Finished writing to $out.\n";
return($out, $formatter) if wantarray;
return $out;
}
#..........................................................................
sub unlink_if_temp_file {
# Unlink the specified file IFF it's in the list of temp files.
# Really only used in the case of -f / -q things when we can
# throw away the dynamically generated source pod file once
# we've formatted it.
#
my($self, $file) = @_;
return unless defined $file and length $file;
my $temp_file_list = $self->{'temp_file_list'} || return;
if(grep $_ eq $file, @$temp_file_list) {
$self->aside("Unlinking $file\n");
unlink($file) or $self->warn( "Odd, couldn't unlink $file: $!" );
} else {
DEBUG > 1 and print "$file isn't a temp file, so not unlinking.\n";
}
return;
}
#..........................................................................
sub after_rendering {
my $self = $_[0];
$self->after_rendering_VMS if $self->is_vms;
$self->after_rendering_MSWin32 if $self->is_mswin32;
$self->after_rendering_Dos if $self->is_dos;
$self->after_rendering_OS2 if $self->is_os2;
return;
}
sub after_rendering_VMS { return }
sub after_rendering_Dos { return }
sub after_rendering_OS2 { return }
sub after_rendering_MSWin32 { return }
#..........................................................................
# : : : : : : : : :
#..........................................................................
sub minus_f_nocase { # i.e., do like -f, but without regard to case
my($self, $dir, $file) = @_;
my $path = catfile($dir,$file);
return $path if -f $path and -r _;
if(!$self->opt_i
or $self->is_vms or $self->is_mswin32
or $self->is_dos or $self->is_os2
) {
# On a case-forgiving file system, or if case is important,
# that is it, all we can do.
$self->warn( "Ignored $path: unreadable\n" ) if -f _;
return '';
}
local *DIR;
my @p = ($dir);
my($p,$cip);
foreach $p (splitdir $file){
my $try = catfile @p, $p;
$self->aside("Scrutinizing $try...\n");
stat $try;
if (-d _) {
push @p, $p;
if ( $p eq $self->{'target'} ) {
my $tmp_path = catfile @p;
my $path_f = 0;
for (@{ $self->{'found'} }) {
$path_f = 1 if $_ eq $tmp_path;
}
push (@{ $self->{'found'} }, $tmp_path) unless $path_f;
$self->aside( "Found as $tmp_path but directory\n" );
}
}
elsif (-f _ && -r _ && lc($try) eq lc($path)) {
return $try;
}
elsif (-f _) {
$self->warn( "Ignored $try: unreadable or file/dir mismatch\n" );
}
elsif (-d catdir(@p)) { # at least we see the containing directory!
my $found = 0;
my $lcp = lc $p;
my $p_dirspec = catdir(@p);
opendir DIR, $p_dirspec or $self->die( "opendir $p_dirspec: $!" );
while(defined( $cip = readdir(DIR) )) {
if (lc $cip eq $lcp){
$found++;
last; # XXX stop at the first? what if there's others?
}
}
closedir DIR or $self->die( "closedir $p_dirspec: $!" );
return "" unless $found;
push @p, $cip;
my $p_filespec = catfile(@p);
return $p_filespec if -f $p_filespec and -r _;
$self->warn( "Ignored $p_filespec: unreadable\n" ) if -f _;
}
}
return "";
}
#..........................................................................
sub pagers_guessing {
# TODO: This whole subroutine needs to be rewritten. It's semi-insane
# right now.
my $self = shift;
my @pagers;
push @pagers, $self->pagers;
$self->{'pagers'} = \@pagers;
if ($self->is_mswin32) {
push @pagers, qw( more< less notepad );
unshift @pagers, $ENV{PAGER} if $ENV{PAGER};
}
elsif ($self->is_vms) {
push @pagers, qw( most more less type/page );
}
elsif ($self->is_dos) {
push @pagers, qw( less.exe more.com< );
unshift @pagers, $ENV{PAGER} if $ENV{PAGER};
}
elsif ( $self->is_amigaos) {
push @pagers, qw( /SYS/Utilities/MultiView /SYS/Utilities/More /C/TYPE );
unshift @pagers, "$ENV{PAGER}" if $ENV{PAGER};
}
else {
if ($self->is_os2) {
unshift @pagers, 'less', 'cmd /c more <';
}
push @pagers, qw( more less pg view cat );
unshift @pagers, "$ENV{PAGER} <" if $ENV{PAGER};
}
if ($self->is_cygwin) {
if (($pagers[0] eq 'less') || ($pagers[0] eq '/usr/bin/less')) {
unshift @pagers, '/usr/bin/less -isrR';
unshift @pagers, $ENV{PAGER} if $ENV{PAGER};
}
}
if ( $self->opt_m ) {
unshift @pagers, "$ENV{PERLDOC_SRC_PAGER}" if $ENV{PERLDOC_SRC_PAGER}
}
else {
unshift @pagers, "$ENV{MANPAGER} <" if $ENV{MANPAGER};
unshift @pagers, "$ENV{PERLDOC_PAGER} <" if $ENV{PERLDOC_PAGER};
}
$self->aside("Pagers: ", @pagers);
return;
}
#..........................................................................
sub page_module_file {
my($self, @found) = @_;
# Security note:
# Don't ever just pass this off to anything like MSWin's "start.exe",
# since we might be calling on a .pl file, and we wouldn't want that
# to actually /execute/ the file that we just want to page thru!
# Also a consideration if one were to use a web browser as a pager;
# doing so could trigger the browser's MIME mapping for whatever
# it thinks .pm/.pl/whatever is. Probably just a (useless and
# annoying) "Save as..." dialog, but potentially executing the file
# in question -- particularly in the case of MSIE and its, ahem,
# occasionally hazy distinction between OS-local extension
# associations, and browser-specific MIME mappings.
if(@found > 1) {
$self->warn(
"Perldoc is only really meant for reading one document at a time.\n" .
"So these files are being ignored: " .
join(' ', @found[1 .. $#found] ) .
"\n" )
}
return $self->page($found[0], $self->{'output_to_stdout'}, $self->pagers);
}
#..........................................................................
sub check_file {
my($self, $dir, $file) = @_;
unless( ref $self ) {
# Should never get called:
$Carp::Verbose = 1;
require Carp;
Carp::croak( join '',
"Crazy ", __PACKAGE__, " error:\n",
"check_file must be an object_method!\n",
"Aborting"
);
}
if(length $dir and not -d $dir) {
DEBUG > 3 and print " No dir $dir -- skipping.\n";
return "";
}
my $path = $self->minus_f_nocase($dir,$file);
if( length $path and ($self->opt_m ? $self->isprintable($path)
: $self->containspod($path)) ) {
DEBUG > 3 and print
" The file $path indeed looks promising!\n";
return $path;
}
DEBUG > 3 and print " No good: $file in $dir\n";
return "";
}
sub isprintable {
my($self, $file, $readit) = @_;
my $size= 1024;
my $maxunprintfrac= 0.2; # tolerate some unprintables for UTF-8 comments etc.
return 1 if !$readit && $file =~ /\.(?:pl|pm|pod|cmd|com|bat)\z/i;
my $data;
local($_);
my $fh = $self->open_fh("<", $file);
read $fh, $data, $size;
close $fh;
$size= length($data);
$data =~ tr/\x09-\x0D\x20-\x7E//d;
return length($data) <= $size*$maxunprintfrac;
}
#..........................................................................
sub containspod {
my($self, $file, $readit) = @_;
return 1 if !$readit && $file =~ /\.pod\z/i;
# Under cygwin the /usr/bin/perl is a legal executable, but
# you cannot open a file with that name. It must be spelled
# out as "/usr/bin/perl.exe".
#
# The following if-case under cygwin prevents error
#
# $ perldoc perl
# Cannot open /usr/bin/perl: no such file or directory
#
# This would work though
#
# $ perldoc perl.pod
if ( $self->is_cygwin and -x $file and -f "$file.exe" )
{
$self->warn( "Cygwin $file.exe search skipped\n" ) if DEBUG or $self->opt_D;
return 0;
}
local($_);
my $fh = $self->open_fh("<", $file);
while (<$fh>) {
if (/^=head/) {
close($fh) or $self->die( "Can't close $file: $!" );
return 1;
}
}
close($fh) or $self->die( "Can't close $file: $!" );
return 0;
}
#..........................................................................
sub maybe_extend_searchpath {
my $self = shift;
# Does this look like a module or extension directory?
if (-f "Makefile.PL" || -f "Build.PL") {
push @{$self->{search_path} }, '.','lib';
# don't add if superuser
if ($< && $> && -d "blib") { # don't be looking too hard now!
push @{ $self->{search_path} }, 'blib';
$self->warn( $@ ) if $@ && $self->opt_D;
}
}
return;
}
#..........................................................................
sub new_output_file {
my $self = shift;
my $outspec = $self->opt_d; # Yes, -d overrides all else!
# So don't call this twice per format-job!
return $self->new_tempfile(@_) unless defined $outspec and length $outspec;
# Otherwise open a write-handle on opt_d!f
DEBUG > 3 and print "About to try writing to specified output file $outspec\n";
my $fh = $self->open_fh(">", $outspec);
DEBUG > 3 and print "Successfully opened $outspec\n";
binmode($fh) if $self->{'output_is_binary'};
return($fh, $outspec);
}
#..........................................................................
sub useful_filename_bit {
# This tries to provide a meaningful bit of text to do with the query,
# such as can be used in naming the file -- since if we're going to be
# opening windows on temp files (as a "pager" may well do!) then it's
# better if the temp file's name (which may well be used as the window
# title) isn't ALL just random garbage!
# In other words "perldoc_LWPSimple_2371981429" is a better temp file
# name than "perldoc_2371981429". So this routine is what tries to
# provide the "LWPSimple" bit.
#
my $self = shift;
my $pages = $self->{'pages'} || return undef;
return undef unless @$pages;
my $chunk = $pages->[0];
return undef unless defined $chunk;
$chunk =~ s/:://g;
$chunk =~ s/\.\w+$//g; # strip any extension
if( $chunk =~ m/([^\#\\:\/\$]+)$/s ) { # get basename, if it's a file
$chunk = $1;
} else {
return undef;
}
$chunk =~ s/[^a-zA-Z0-9]+//g; # leave ONLY a-zA-Z0-9 things!
$chunk = substr($chunk, -10) if length($chunk) > 10;
return $chunk;
}
#..........................................................................
sub new_tempfile { # $self->new_tempfile( [$suffix, [$infix] ] )
my $self = shift;
++$Temp_Files_Created;
require File::Temp;
return File::Temp::tempfile(UNLINK => 1);
}
#..........................................................................
sub page { # apply a pager to the output file
my ($self, $output, $output_to_stdout, @pagers) = @_;
if ($output_to_stdout) {
$self->aside("Sending unpaged output to STDOUT.\n");
my $fh = $self->open_fh("<", $output);
local $_;
while (<$fh>) {
print or $self->die( "Can't print to stdout: $!" );
}
close $fh or $self->die( "Can't close $output: $!" );
$self->unlink_if_temp_file($output);
} else {
# On VMS, quoting prevents logical expansion, and temp files with no
# extension get the wrong default extension (such as .LIS for TYPE)
$output = VMS::Filespec::rmsexpand($output, '.') if $self->is_vms;
$output =~ s{/}{\\}g if $self->is_mswin32 || $self->is_dos;
# Altho "/" under MSWin is in theory good as a pathsep,
# many many corners of the OS don't like it. So we
# have to force it to be "\" to make everyone happy.
# if we are on an amiga convert unix path to an amiga one
$output =~ s/^\/(.*)\/(.*)/$1:$2/ if $self->is_amigaos;
foreach my $pager (@pagers) {
$self->aside("About to try calling $pager $output\n");
if ($self->is_vms) {
last if system("$pager $output") == 0;
} elsif($self->is_amigaos) {
last if system($pager, $output) == 0;
} else {
# fix visible escape codes in ToTerm output
# https://bugs.debian.org/758689
local $ENV{LESS} = defined $ENV{LESS} ? "$ENV{LESS} -R" : "-R";
last if system("$pager \"$output\"") == 0;
}
}
}
return;
}
#..........................................................................
sub searchfor {
my($self, $recurse,$s,@dirs) = @_;
$s =~ s!::!/!g;
$s = VMS::Filespec::unixify($s) if $self->is_vms;
return $s if -f $s && $self->containspod($s);
$self->aside( "Looking for $s in @dirs\n" );
my $ret;
my $i;
my $dir;
$self->{'target'} = (splitdir $s)[-1]; # XXX: why not use File::Basename?
for ($i=0; $i<@dirs; $i++) {
$dir = $dirs[$i];
next unless -d $dir;
($dir = VMS::Filespec::unixpath($dir)) =~ s!/\z!! if $self->is_vms;
if ( (! $self->opt_m && ( $ret = $self->check_file($dir,"$s.pod")))
or ( $ret = $self->check_file($dir,"$s.pm"))
or ( $ret = $self->check_file($dir,$s))
or ( $self->is_vms and
$ret = $self->check_file($dir,"$s.com"))
or ( $self->is_os2 and
$ret = $self->check_file($dir,"$s.cmd"))
or ( ($self->is_mswin32 or $self->is_dos or $self->is_os2) and
$ret = $self->check_file($dir,"$s.bat"))
or ( $ret = $self->check_file("$dir/pod","$s.pod"))
or ( $ret = $self->check_file("$dir/pod",$s))
or ( $ret = $self->check_file("$dir/pods","$s.pod"))
or ( $ret = $self->check_file("$dir/pods",$s))
) {
DEBUG > 1 and print " Found $ret\n";
return $ret;
}
if ($recurse) {
opendir(D,$dir) or $self->die( "Can't opendir $dir: $!" );
my @newdirs = map catfile($dir, $_), grep {
not /^\.\.?\z/s and
not /^auto\z/s and # save time! don't search auto dirs
-d catfile($dir, $_)
} readdir D;
closedir(D) or $self->die( "Can't closedir $dir: $!" );
next unless @newdirs;
# what a wicked map!
@newdirs = map((s/\.dir\z//,$_)[1],@newdirs) if $self->is_vms;
$self->aside( "Also looking in @newdirs\n" );
push(@dirs,@newdirs);
}
}
return ();
}
#..........................................................................
{
my $already_asserted;
sub assert_closing_stdout {
my $self = shift;
return if $already_asserted;
eval q~ END { close(STDOUT) || CORE::die "Can't close STDOUT: $!" } ~;
# What for? to let the pager know that nothing more will come?
$self->die( $@ ) if $@;
$already_asserted = 1;
return;
}
}
#..........................................................................
sub tweak_found_pathnames {
my($self, $found) = @_;
if ($self->is_mswin32) {
foreach (@$found) { s,/,\\,g }
}
foreach (@$found) { s,',\\',g } # RT 37347
return;
}
#..........................................................................
# : : : : : : : : :
#..........................................................................
sub am_taint_checking {
my $self = shift;
$self->die( "NO ENVIRONMENT?!?!" ) unless keys %ENV; # reset iterator along the way
my($k,$v) = each %ENV;
return is_tainted($v);
}
#..........................................................................
sub is_tainted { # just a function
my $arg = shift;
my $nada = substr($arg, 0, 0); # zero-length!
local $@; # preserve the caller's version of $@
eval { eval "# $nada" };
return length($@) != 0;
}
#..........................................................................
sub drop_privs_maybe {
my $self = shift;
DEBUG and print "Attempting to drop privs...\n";
# Attempt to drop privs if we should be tainting and aren't
if (!( $self->is_vms || $self->is_mswin32 || $self->is_dos
|| $self->is_os2
)
&& ($> == 0 || $< == 0)
&& !$self->am_taint_checking()
) {
my $id = eval { getpwnam("nobody") };
$id = eval { getpwnam("nouser") } unless defined $id;
$id = -2 unless defined $id;
#
# According to Stevens' APUE and various
# (BSD, Solaris, HP-UX) man pages, setting
# the real uid first and effective uid second
# is the way to go if one wants to drop privileges,
# because if one changes into an effective uid of
# non-zero, one cannot change the real uid any more.
#
# Actually, it gets even messier. There is
# a third uid, called the saved uid, and as
# long as that is zero, one can get back to
# uid of zero. Setting the real-effective *twice*
# helps in *most* systems (FreeBSD and Solaris)
# but apparently in HP-UX even this doesn't help:
# the saved uid stays zero (apparently the only way
# in HP-UX to change saved uid is to call setuid()
# when the effective uid is zero).
#
eval {
$< = $id; # real uid
$> = $id; # effective uid
$< = $id; # real uid
$> = $id; # effective uid
};
if( !$@ && $< && $> ) {
DEBUG and print "OK, I dropped privileges.\n";
} elsif( $self->opt_U ) {
DEBUG and print "Couldn't drop privileges, but in -U mode, so feh.\n";
} else {
DEBUG and print "Hm, couldn't drop privileges. Ah well.\n";
# We used to die here; but that seemed pointless.
}
}
return;
}
#..........................................................................
1;
__END__
=head1 NAME
Pod::Perldoc - Look up Perl documentation in Pod format.
=head1 SYNOPSIS
use Pod::Perldoc ();
Pod::Perldoc->run();
=head1 DESCRIPTION
The guts of L<perldoc> utility.
=head1 SEE ALSO
L<perldoc>
=head1 COPYRIGHT AND DISCLAIMERS
Copyright (c) 2002-2007 Sean M. Burke.
This library is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.
This program is distributed in the hope that it will be useful, but
without any warranty; without even the implied warranty of
merchantability or fitness for a particular purpose.
=head1 AUTHOR
Current maintainer: Mark Allen C<< <[email protected]> >>
Past contributions from:
brian d foy C<< <[email protected]> >>
Adriano R. Ferreira C<< <[email protected]> >>,
Sean M. Burke C<< <[email protected]> >>
=cut
| 30.194872 | 96 | 0.506763 |
edbb1ad71fba6992bae7b0165a9397428500c534 | 3,111 | pl | Perl | tests/cryptotest.pl | onyxdevteam/silicon | 2b2bb5d2181efae9a8e792a6c126190d4e85698e | [
"BSD-3-Clause"
] | null | null | null | tests/cryptotest.pl | onyxdevteam/silicon | 2b2bb5d2181efae9a8e792a6c126190d4e85698e | [
"BSD-3-Clause"
] | null | null | null | tests/cryptotest.pl | onyxdevteam/silicon | 2b2bb5d2181efae9a8e792a6c126190d4e85698e | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2014-2017, The Silicon Project
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are
# permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be
# used to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
# THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
# THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Parts of this file are originally copyright (c) 2012-2013 The Cryptonote developers
require 'cryptolib.pl';
$key = 'fc7557a2595788aea7205ffd801b8a157dc9c698adb2c598ba543eaa67cb700e';
$pt = '664422cf6f4100dc6b3298e41ca53b173a98918fc9cb50fc2d590b7d1285f4ab';
$m = keccak_256(pack 'H*', 'c8fedd380dbae40ffb52');
$s = '26a9589121e569ee0ac2e8ac7a7ea331d348f9a0fa8d28926d27c7506759e406';
$e = '780be966ad89ba526cc7adf4b771adbdaa0568038e6a30e776839a81e57dee0c';
print " self SIG -- OK\n" if check_s($m,$pt,sign($m,$key));
print " test SIG -- OK\n" if check_s($m,$pt,$s,$e);
@aa = r_sign($m,im_gen($pt,$key),$key,1,ec_pack(ec_mul(111,$x0,$y0)),$pt,ec_pack(ec_mul(47,$x0,$y0)));
print " self RSIG -- OK\n" if r_check_s($m,im_gen($pt,$key),ec_pack(ec_mul(111,$x0,$y0)),$pt,ec_pack(ec_mul(47,$x0,$y0)),@aa);
$k1 = '6a7a81a52ba91b9785b484d761bfb3ad9a473c147e17b7fbbc3992e8c97108d7';
$sk1 = '3ce3eb784016a53fa915053d24f55dc8fbc7af3fabc915701adb67e61a25f50f';
$k2 = '0f3fe9c20b24a11bf4d6d1acd335c6a80543f1f0380590d7323caf1390c78e88';
$sk2 = '4967a2bfa0c8a0afc0df238d068b6c7182577afd0781c9d3720bb7a6cf71630c'; #main key
$m = keccak_256(pack 'H*', '5020c4d530b6ec6cb4d9');
@sig = ('b7903a4a3aca7253bb98be335014bebb33683aedca0bc46e288e229ecfccbe0e',
'2c15e4de88ff38d655e2deef0e06a7ca4541a7754c37e7b20875cce791754508',
'6acae497177b2eeaf658b813eaf50e1e06f3d1107694beff9b520c65ee624f05',
'026c8d9801f7330aa82426adf5bacf4546d83df0cc12321ede90df8c0d9aa800');
print " test RSIG -- OK" if r_check_s($m,im_gen($k2,$sk2),$k1, $k2, @sig);
| 52.728814 | 128 | 0.781742 |
edb6239ae7855d6ede7a90ec28891f083a1bfff5 | 9,557 | pm | Perl | nems/oldsynth/jade_dec_nem_mux/top/genesis_work/test_lut.pm | mfkiwl/NEM-Relay-CGRA | 1ad59b83b9a61ee56da43e5491a95d2f4e6c2ac4 | [
"BSD-3-Clause"
] | 2 | 2021-03-28T08:03:01.000Z | 2021-09-19T08:10:02.000Z | nems/oldsynth/jade_dec_nem_mux/top/genesis_work/test_lut.pm | mfkiwl/NEM-Relay-CGRA | 1ad59b83b9a61ee56da43e5491a95d2f4e6c2ac4 | [
"BSD-3-Clause"
] | null | null | null | nems/oldsynth/jade_dec_nem_mux/top/genesis_work/test_lut.pm | mfkiwl/NEM-Relay-CGRA | 1ad59b83b9a61ee56da43e5491a95d2f4e6c2ac4 | [
"BSD-3-Clause"
] | 2 | 2021-03-28T08:02:57.000Z | 2021-11-06T05:14:17.000Z | package test_lut;
use strict;
use vars qw($VERSION @ISA @EXPORT @EXPORT_OK);
use Exporter;
use FileHandle;
use Env; # Make environment variables available
use Genesis2::Manager 1.00;
use Genesis2::UniqueModule 1.00;
@ISA = qw(Exporter Genesis2::UniqueModule);
@EXPORT = qw();
@EXPORT_OK = qw();
$VERSION = '1.0';
sub get_SrcSuffix {Genesis2::UniqueModule::private_to_me(); return ".svp";};
sub get_OutfileSuffix {Genesis2::UniqueModule::private_to_me(); return ".sv"};
############################### Module Starts Here ###########################
sub to_verilog{
# START PRE-GENERATED TO_VERILOG PREFIX CODE >>>
my $self = shift;
print STDERR "$self->{BaseModuleName}->to_verilog: Start user code\n"
if $self->{Debug} & 8;
# <<< END PRE-GENERATED TO_VERILOG PREFIX CODE
$self->SUPER::to_verilog('/Users/akashlevy/OneDrive - Levylab/Documents/Research/Hybrid-RRAM-NEMS/cgra/jade/pe_new/pe/rtl/test_lut.svp');
# START USER CODE FROM /Users/akashlevy/OneDrive - Levylab/Documents/Research/Hybrid-RRAM-NEMS/cgra/jade/pe_new/pe/rtl/test_lut.svp PARSED INTO PACKAGE >>>
# line 1 "/Users/akashlevy/OneDrive - Levylab/Documents/Research/Hybrid-RRAM-NEMS/cgra/jade/pe_new/pe/rtl/test_lut.svp"
my $lut_inps = parameter(Name=>'lut_inps',
Val=>2, Min=>2, Step=>1, Max=>16,
Doc=>"Number of inputs to a LUT");
my $c_in_decl = ($lut_inps > 3) ? "[".($lut_inps-3).":0]" : " ";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } 'module '; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } mname; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' #(';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' parameter DataWidth = 16';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ') (';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' input cfg_clk,';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' input cfg_rst_n,';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' input [31:0] cfg_d,';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' input [7:0] cfg_a,';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' input cfg_en,';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' input [DataWidth-1:0] op_a_in,';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' input [DataWidth-1:0] op_b_in,';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
if ($lut_inps > 2) {
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' input '; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } $c_in_decl; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' op_c_in,';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
}
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' output logic [31:0] read_data,';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' output logic [DataWidth-1:0] res';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ');';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } 'genvar ggg;';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } 'generate';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' for (ggg = 0; ggg < DataWidth; ggg = ggg +1) begin : GEN_LUT';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' logic ['; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } 2**$lut_inps-1; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ':0] lut;';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' always_ff @(posedge cfg_clk or negedge cfg_rst_n) begin';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' if(~cfg_rst_n) begin';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' lut <= '; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } 2**$lut_inps; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '\'h0;';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' end else if(cfg_en && (cfg_a == $unsigned(ggg/4)) ) begin';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
my $cfg_index = 2**$lut_inps;
if ($cfg_index > 32) {$cfg_index = 32;}
if((2**$lut_inps) > 32) {
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' lut <= {'; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } (2**$lut_inps)/32; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '{cfg_d['; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } $cfg_index-1; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ': 0]}};';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
} else {
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' lut <= cfg_d['; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } $cfg_index-1; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ': 0];';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
}
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' end';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' end';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
if ($lut_inps > 2) {
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' assign res[ggg] = lut[{op_c_in, op_b_in[ggg], op_a_in[ggg]}];';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
} else {
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' assign res[ggg] = lut[{op_b_in[ggg], op_a_in[ggg]}];';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
}
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' assign read_data = {'; print { $Genesis2::UniqueModule::myself->{OutfileHandle} } 32-(2**$lut_inps); print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '\'b0, lut};';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } ' end';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } 'endgenerate';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } 'endmodule';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
print { $Genesis2::UniqueModule::myself->{OutfileHandle} } '';print { $Genesis2::UniqueModule::myself->{OutfileHandle} } "\n";
# <<< END USER CODE FROM /Users/akashlevy/OneDrive - Levylab/Documents/Research/Hybrid-RRAM-NEMS/cgra/jade/pe_new/pe/rtl/test_lut.svp PARSED INTO PACKAGE
# START PRE-GENERATED TO_VERILOG SUFFIX CODE >>>
print STDERR "$self->{BaseModuleName}->to_verilog: Done with user code\n"
if $self->{Debug} & 8;
#
# clean up code comes here...
#
# <<< END PRE-GENERATED TO_VERILOG SUFFIX CODE
}
| 92.786408 | 436 | 0.673433 |
edde42d0f0eede60dca33257999a8062b1ecaf0f | 1,299 | pl | Perl | script/hyperglossary_fastcgi.pl | DeepLit/WHG | f6567b04d6c4a194ff8fdaf07380aa390f639739 | [
"Apache-2.0"
] | 1 | 2016-01-14T20:45:13.000Z | 2016-01-14T20:45:13.000Z | script/hyperglossary_fastcgi.pl | DeepLit/WHG | f6567b04d6c4a194ff8fdaf07380aa390f639739 | [
"Apache-2.0"
] | null | null | null | script/hyperglossary_fastcgi.pl | DeepLit/WHG | f6567b04d6c4a194ff8fdaf07380aa390f639739 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env perl
use FindBin;
BEGIN { do "$FindBin::Bin/env" or die $@ }
use Catalyst::ScriptRunner;
Catalyst::ScriptRunner->run('HyperGlossary', 'FastCGI');
1;
=head1 NAME
hyperglossary_fastcgi.pl - Catalyst FastCGI
=head1 SYNOPSIS
hyperglossary_fastcgi.pl [options]
Options:
-? -help display this help and exits
-l --listen Socket path to listen on
(defaults to standard input)
can be HOST:PORT, :PORT or a
filesystem path
-n --nproc specify number of processes to keep
to serve requests (defaults to 1,
requires -listen)
-p --pidfile specify filename for pid file
(requires -listen)
-d --daemon daemonize (requires -listen)
-M --manager specify alternate process manager
(FCGI::ProcManager sub-class)
or empty string to disable
-e --keeperr send error messages to STDOUT, not
to the webserver
--proc_title Set the process title (if possible)
=head1 DESCRIPTION
Run a Catalyst application as fastcgi.
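For example, a daemonized deployment listening on a UNIX socket with a small
worker pool could be started as shown below (the socket path, pid file and
process count are illustrative values only, not project defaults):
  hyperglossary_fastcgi.pl -l /tmp/hyperglossary.sock -n 5 -p /tmp/hyperglossary.pid -d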
=head1 AUTHORS
Catalyst Contributors, see Catalyst.pm
=head1 COPYRIGHT
This library is free software. You can redistribute it and/or modify
it under the same terms as Perl itself.
=cut
| 25.470588 | 68 | 0.649731 |
ed6b5fcda0a22f2d12a6d4284b50e53a5f55cc86 | 987 | pl | Perl | Prolog/LogicoPt1/exemplos.pl | LeandraOS/PracticesPLP | f60e30a3ada2f85fc2ddf169285ac9118744d1a7 | [
"MIT"
] | null | null | null | Prolog/LogicoPt1/exemplos.pl | LeandraOS/PracticesPLP | f60e30a3ada2f85fc2ddf169285ac9118744d1a7 | [
"MIT"
] | null | null | null | Prolog/LogicoPt1/exemplos.pl | LeandraOS/PracticesPLP | f60e30a3ada2f85fc2ddf169285ac9118744d1a7 | [
"MIT"
] | null | null | null | % Increment an integer
acc(X, R) :- R is X + 1.
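% Example query (sketch): ?- acc(3, R). yields R = 4.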
main:-
read(X),
acc(X, Y),
write(Y).
% Program that reads a student's name and grades and checks their status (APROVADO, REPROVADO, or NA FINAL)
leEntradas:-
write('Digite o Nome:'),
read(Nome),
write('Digite a primeira Nota: '),
read(N1),
write('Digite a segunda Nota: '),
read(N2),
write('Digite a terceira Nota: '),
read(N3),
situacao(Nome, N1, N2, N3, R),
write(R).
situacao(Nome, N1, N2, N3, R) :- M is (N1 + N2 + N3)/3,
(M >= 7 -> atom_concat(Nome, 'está APROVADO.', R);
M < 4 -> atom_concat(Nome, 'está REPROVADO', R);
atom_concat(Nome, 'está na FINAL.', R)).
% Program that reads an integer N from standard input and,
% if the number is less than 10, prints all the numbers [N...1]
start:-
read(X),
X < 10,
loop(X).
loop(0).
loop(N):-
N > 0,
write('Number:'),
write(N),
nl,
M is N - 1,
loop(M).
| 21.933333 | 103 | 0.561297 |
eddf30a0b219ee6aa28f69cfde2f6f46fcb9c6ac | 570 | pl | Perl | WorkEnv/PathScripts/snake_percent_filename.pl | m-macnair/Toolbox | 80eff1f0ab1f155302b5a2598c8fbaf613110392 | [
"BSD-3-Clause"
] | null | null | null | WorkEnv/PathScripts/snake_percent_filename.pl | m-macnair/Toolbox | 80eff1f0ab1f155302b5a2598c8fbaf613110392 | [
"BSD-3-Clause"
] | null | null | null | WorkEnv/PathScripts/snake_percent_filename.pl | m-macnair/Toolbox | 80eff1f0ab1f155302b5a2598c8fbaf613110392 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/perl
#ABSTRACT:
our $VERSION = 'v0.0.2';
##~ DIGEST : 778c0aa665f47981eb775e0019f1e3cd
use strict;
use warnings;
package Obj;
use Moo;
use parent 'Moo::GenericRoleClass::CLI'; #provides CLI, FileSystem, Common
with qw//;
sub process {
my ( $self, $path ) = @_;
$path = $self->abs_path( $path );
my $new_path = $self->snake_percent_file( $path );
if ( -e $new_path ) {
die "Intended new path [$new_path] exists";
}
$self->mvf( $path, $new_path );
}
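# Usage sketch (hypothetical invocation): perl snake_percent_filename.pl "Some File.txt"
# renames the given path to the snake_case/percent-style form returned by
# snake_percent_file(); the exact result depends on Moo::GenericRoleClass::CLI,
# which is not part of this file.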
1;
package main;
main();
sub main {
my $self = Obj->new();
print $self->process( @ARGV );
}
| 16.285714 | 75 | 0.638596 |
edbbe1943001ba62d3ef98f458c292fb748bf0e4 | 1,647 | pm | Perl | auto-lib/Paws/ApiGatewayV2/DomainNames.pm | meis/aws-sdk-perl | 6d61ffcf351e446f06d7e84e53caa08d98573959 | [
"Apache-2.0"
] | null | null | null | auto-lib/Paws/ApiGatewayV2/DomainNames.pm | meis/aws-sdk-perl | 6d61ffcf351e446f06d7e84e53caa08d98573959 | [
"Apache-2.0"
] | 1 | 2021-05-26T19:13:58.000Z | 2021-05-26T19:13:58.000Z | auto-lib/Paws/ApiGatewayV2/DomainNames.pm | meis/aws-sdk-perl | 6d61ffcf351e446f06d7e84e53caa08d98573959 | [
"Apache-2.0"
] | null | null | null | package Paws::ApiGatewayV2::DomainNames;
use Moose;
has Items => (is => 'ro', isa => 'ArrayRef[Paws::ApiGatewayV2::DomainName]', request_name => 'items', traits => ['NameInRequest']);
has NextToken => (is => 'ro', isa => 'Str', request_name => 'nextToken', traits => ['NameInRequest']);
1;
### main pod documentation begin ###
=head1 NAME
Paws::ApiGatewayV2::DomainNames
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::ApiGatewayV2::DomainNames object:
$service_obj->Method(Att1 => { Items => $value, ..., NextToken => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be a Paws::ApiGatewayV2::DomainNames object:
$result = $service_obj->Method(...);
$result->Att1->Items
=head1 DESCRIPTION
Represents a collection of domain names.
=head1 ATTRIBUTES
=head2 Items => ArrayRef[L<Paws::ApiGatewayV2::DomainName>]
The elements from this collection.
=head2 NextToken => Str
The next page of elements from this collection. Not valid for the last
element of the collection.
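A minimal pagination sketch follows; it assumes the Paws::ApiGatewayV2 service
class exposes a GetDomainNames call returning this object (check the generated
Paws::ApiGatewayV2 documentation for the exact method and argument names):
  my $next;
  do {
    my $page = $service_obj->GetDomainNames( $next ? (NextToken => $next) : () );
    # ... work with @{ $page->Items } here ...
    $next = $page->NextToken;
  } while (defined $next);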
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::ApiGatewayV2>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 25.734375 | 133 | 0.726776 |
edcd5d5b6729b3273eea5f417367de0d2607e123 | 260 | t | Perl | tools/regression/usr.bin/make/syntax/enl/test.t | TrustedBSD/sebsd | fd5de6f587183087cf930779701d5713e8ca64cc | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 4 | 2017-04-06T21:39:15.000Z | 2019-10-09T17:34:14.000Z | tools/regression/usr.bin/make/syntax/enl/test.t | TrustedBSD/sebsd | fd5de6f587183087cf930779701d5713e8ca64cc | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | tools/regression/usr.bin/make/syntax/enl/test.t | TrustedBSD/sebsd | fd5de6f587183087cf930779701d5713e8ca64cc | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1 | 2020-01-04T06:36:39.000Z | 2020-01-04T06:36:39.000Z | #!/bin/sh
# $FreeBSD: src/tools/regression/usr.bin/make/syntax/enl/test.t,v 1.2 2005/10/18 07:20:14 harti Exp $
cd `dirname $0`
. ../../common.sh
# Description
DESC="Test escaped new-lines handling."
# Run
TEST_N=5
TEST_2_TODO="bug in parser"
eval_cmd $*
| 16.25 | 101 | 0.688462 |
eda1fc328a9d7e15615cbb5e7d8b95fbf0cb49de | 1,596 | pm | Perl | tests/yast2_gui/yast2_lan_ifcfg_errors.pm | lansuse/os-autoinst-distri-opensuse | 8e8c532236f2436693ec1da426de563c9759d778 | [
"FSFAP"
] | null | null | null | tests/yast2_gui/yast2_lan_ifcfg_errors.pm | lansuse/os-autoinst-distri-opensuse | 8e8c532236f2436693ec1da426de563c9759d778 | [
"FSFAP"
] | null | null | null | tests/yast2_gui/yast2_lan_ifcfg_errors.pm | lansuse/os-autoinst-distri-opensuse | 8e8c532236f2436693ec1da426de563c9759d778 | [
"FSFAP"
] | null | null | null | # SUSE's openQA tests
#
# Copyright 2021 SUSE LLC
#
# Copying and distribution of this file, with or without modification,
# are permitted in any medium without royalty provided the copyright
# notice and this notice are preserved. This file is offered as-is,
# without any warranty.
# Summary: Verify that yast2-lan does not crash if there are errors
# (like typos or duplicates) in one of the ifcfg files.
# Maintainer: QE YaST <[email protected]>
use base 'y2_module_guitest';
use strict;
use warnings;
use testapi;
use y2lan_restart_common qw(open_network_settings wait_for_xterm_to_be_visible close_xterm close_network_settings);
use x11utils 'start_root_shell_in_xterm';
use scheduler 'get_test_suite_data';
sub check_errors_in_ifcfg {
my ($error_in_ifcfg, $ifcfg_file) = @_;
assert_script_run("$error_in_ifcfg $ifcfg_file"); # Insert an error in ifcfg file
open_network_settings;
close_network_settings;
wait_for_xterm_to_be_visible();
}
sub run {
my $test_data = get_test_suite_data();
my $ifcfg_file = '/etc/sysconfig/network/ifcfg-' . $test_data->{net_device};
record_info('IFCFG', 'Verify that putting wrong settings in ifcfg files does not provoke a crash');
start_root_shell_in_xterm();
assert_script_run("cat $ifcfg_file > backup");
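# Each entry in errors_in_ifcfg_file is expected to be a shell command prefix that,
# with the ifcfg file name appended, injects a broken setting, e.g. something like
# 'echo "BOOTPROTO=dhcpp" >>' (illustrative only; the real entries come from the
# scheduled test suite data, not from this module).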
foreach my $error_in_ifcfg (@{$test_data->{errors_in_ifcfg_file}}) {
check_errors_in_ifcfg($error_in_ifcfg, $ifcfg_file); # See descriptions of errors in test_data
assert_script_run("cat backup > $ifcfg_file");
}
assert_script_run("rm backup");
close_xterm();
}
1;
| 35.466667 | 115 | 0.746867 |
ede3fcfbe891069e699556b904a11e15a7e39027 | 130 | pl | Perl | problog/someheads.pl | BouweCeunen/prolog-and-derivatives | 0c12aa6e0598f4ad183f91534492716803f03d5f | [
"Condor-1.1"
] | null | null | null | problog/someheads.pl | BouweCeunen/prolog-and-derivatives | 0c12aa6e0598f4ad183f91534492716803f03d5f | [
"Condor-1.1"
] | null | null | null | problog/someheads.pl | BouweCeunen/prolog-and-derivatives | 0c12aa6e0598f4ad183f91534492716803f03d5f | [
"Condor-1.1"
] | null | null | null | % Probabilistic facts:
0.5::heads1.
0.6::heads2.
% Rules:
someHeads :- heads1.
someHeads :- heads2.
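% With independent probabilistic facts, the query below evaluates to the noisy-or:
% P(someHeads) = 1 - (1 - 0.5) * (1 - 0.6) = 0.8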
% Queries:
query(someHeads). | 13 | 22 | 0.692308 |
ed4a3f92f5d327b5ced7c1127ec11f623adf3e80 | 14,301 | pl | Perl | Libraries/openssl/util/pl/VC-32.pl | mbert/mulberry-main | 6b7951a3ca56e01a7be67aa12e55bfeafc63950d | [
"ECL-2.0",
"Apache-2.0"
] | 12 | 2015-04-21T16:10:43.000Z | 2021-11-05T13:41:46.000Z | util/pl/VC-32.pl | mbert/mulberry-vendor-openssl | afd56985cea5e3f7d5affc7b43a9a7d084fc7c24 | [
"OpenSSL"
] | 2 | 2015-11-02T13:32:11.000Z | 2019-07-10T21:11:21.000Z | Libraries/openssl/util/pl/VC-32.pl | mbert/mulberry-main | 6b7951a3ca56e01a7be67aa12e55bfeafc63950d | [
"ECL-2.0",
"Apache-2.0"
] | 6 | 2015-01-12T08:49:12.000Z | 2021-03-27T09:11:10.000Z | #!/usr/local/bin/perl
# VC-32.pl - unified script for Microsoft Visual C++, covering Win32,
# Win64 and WinCE [follow $FLAVOR variable to trace the differences].
#
$ssl= "ssleay32";
if ($fips && !$shlib)
{
$crypto="libeayfips32";
$crypto_compat = "libeaycompat32.lib";
}
else
{
$crypto="libeay32";
}
if ($fipscanisterbuild)
{
$fips_canister_path = "\$(LIB_D)\\fipscanister.lib";
}
$o='\\';
$cp='$(PERL) util/copy.pl';
$mkdir='$(PERL) util/mkdir-p.pl';
$rm='del /Q';
$zlib_lib="zlib1.lib";
# Sanitize -L options for ms link
$l_flags =~ s/-L("\[^"]+")/\/libpath:$1/g;
$l_flags =~ s/-L(\S+)/\/libpath:$1/g;
# C compiler stuff
$cc='cl';
if ($FLAVOR =~ /WIN64/)
{
	# Note that we currently don't have /WX on Win64! There are a lot of
	# warnings, but only of two types:
	#
	# C4344: conversion from '__int64' to 'int/long', possible loss of data
	# C4267: conversion from 'size_t' to 'int/long', possible loss of data
	#
	# The number of the latter type is minimized by aliasing strlen to a
	# function of our own design and limiting its return value to 2GB-1
	# (see e_os.h). As of the 0.9.8 release the remaining warnings were
	# explicitly examined and considered safe to ignore.
#
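	# An illustrative sketch of that aliasing trick (the real code lives
	# in e_os.h and may differ in names and details):
	#
	#	#define strlen(s) _strlen31(s)
	#	static unsigned int _strlen31(const char *str)
	#	{ unsigned int len=0;
	#	  while (*str && len<0x80000000U) str++, len++;
	#	  return len&0x7FFFFFFF; }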
$base_cflags=' /W3 /Gs0 /GF /Gy /nologo -DWIN32_LEAN_AND_MEAN -DL_ENDIAN -DDSO_WIN32 -DOPENSSL_SYSNAME_WIN32 -DOPENSSL_SYSNAME_WINNT -DUNICODE -D_UNICODE';
$base_cflags.=' -D_CRT_SECURE_NO_DEPRECATE'; # shut up VC8
$base_cflags.=' -D_CRT_NONSTDC_NO_DEPRECATE'; # shut up VC8
my $f = $shlib?' /MD':' /MT';
$lib_cflag='/Zl' if (!$shlib); # remove /DEFAULTLIBs from static lib
$opt_cflags=$f.' /Ox';
$dbg_cflags=$f.'d /Od -DDEBUG -D_DEBUG';
$lflags="/nologo /subsystem:console /opt:ref";
}
elsif ($FLAVOR =~ /CE/)
{
# sanity check
die '%OSVERSION% is not defined' if (!defined($ENV{'OSVERSION'}));
die '%PLATFORM% is not defined' if (!defined($ENV{'PLATFORM'}));
die '%TARGETCPU% is not defined' if (!defined($ENV{'TARGETCPU'}));
#
# Idea behind this is to mimic flags set by eVC++ IDE...
#
$wcevers = $ENV{'OSVERSION'}; # WCENNN
die '%OSVERSION% value is insane' if ($wcevers !~ /^WCE([1-9])([0-9]{2})$/);
$wcecdefs = "-D_WIN32_WCE=$1$2 -DUNDER_CE=$1$2"; # -D_WIN32_WCE=NNN
$wcelflag = "/subsystem:windowsce,$1.$2"; # ...,N.NN
$wceplatf = $ENV{'PLATFORM'};
$wceplatf =~ tr/a-z0-9 /A-Z0-9_/d;
$wcecdefs .= " -DWCE_PLATFORM_$wceplatf";
$wcetgt = $ENV{'TARGETCPU'}; # just shorter name...
SWITCH: for($wcetgt) {
/^X86/ && do { $wcecdefs.=" -Dx86 -D_X86_ -D_i386_ -Di_386_";
$wcelflag.=" /machine:IX86"; last; };
/^ARMV4[IT]/ && do { $wcecdefs.=" -DARM -D_ARM_ -D$wcetgt";
$wcecdefs.=" -DTHUMB -D_THUMB_" if($wcetgt=~/T$/);
$wcecdefs.=" -QRarch4T -QRinterwork-return";
$wcelflag.=" /machine:THUMB"; last; };
/^ARM/ && do { $wcecdefs.=" -DARM -D_ARM_ -D$wcetgt";
$wcelflag.=" /machine:ARM"; last; };
/^MIPSIV/ && do { $wcecdefs.=" -DMIPS -D_MIPS_ -DR4000 -D$wcetgt";
$wcecdefs.=" -D_MIPS64 -QMmips4 -QMn32";
$wcelflag.=" /machine:MIPSFPU"; last; };
/^MIPS16/ && do { $wcecdefs.=" -DMIPS -D_MIPS_ -DR4000 -D$wcetgt";
$wcecdefs.=" -DMIPSII -QMmips16";
$wcelflag.=" /machine:MIPS16"; last; };
/^MIPSII/ && do { $wcecdefs.=" -DMIPS -D_MIPS_ -DR4000 -D$wcetgt";
$wcecdefs.=" -QMmips2";
$wcelflag.=" /machine:MIPS"; last; };
/^R4[0-9]{3}/ && do { $wcecdefs.=" -DMIPS -D_MIPS_ -DR4000";
$wcelflag.=" /machine:MIPS"; last; };
/^SH[0-9]/ && do { $wcecdefs.=" -D$wcetgt -D_$wcetgt_ -DSHx";
$wcecdefs.=" -Qsh4" if ($wcetgt =~ /^SH4/);
$wcelflag.=" /machine:$wcetgt"; last; };
{ $wcecdefs.=" -D$wcetgt -D_$wcetgt_";
$wcelflag.=" /machine:$wcetgt"; last; };
}
$cc='$(CC)';
$base_cflags=' /W3 /WX /GF /Gy /nologo -DUNICODE -D_UNICODE -DOPENSSL_SYSNAME_WINCE -DWIN32_LEAN_AND_MEAN -DL_ENDIAN -DDSO_WIN32 -DNO_CHMOD -I$(WCECOMPAT)/include -DOPENSSL_SMALL_FOOTPRINT';
$base_cflags.=" $wcecdefs";
$opt_cflags=' /MC /O1i'; # optimize for space, but with intrinsics...
	$dbg_cflags=' /MC /Od -DDEBUG -D_DEBUG';
$lflags="/nologo /opt:ref $wcelflag";
}
else # Win32
{
$base_cflags=' /W3 /WX /Gs0 /GF /Gy /nologo -DOPENSSL_SYSNAME_WIN32 -DWIN32_LEAN_AND_MEAN -DL_ENDIAN -DDSO_WIN32';
$base_cflags.=' -D_CRT_SECURE_NO_DEPRECATE'; # shut up VC8
$base_cflags.=' -D_CRT_NONSTDC_NO_DEPRECATE'; # shut up VC8
my $f = $shlib || $fips ?' /MD':' /MT';
$lib_cflag='/Zl' if (!$shlib); # remove /DEFAULTLIBs from static lib
$opt_cflags=$f.' /Ox /O2 /Ob2';
$dbg_cflags=$f.'d /Od -DDEBUG -D_DEBUG';
$lflags="/nologo /subsystem:console /opt:ref";
}
$mlflags='';
$out_def="out32"; $out_def.='_$(TARGETCPU)' if ($FLAVOR =~ /CE/);
$tmp_def="tmp32"; $tmp_def.='_$(TARGETCPU)' if ($FLAVOR =~ /CE/);
$inc_def="inc32";
if ($debug)
{
$cflags=$dbg_cflags.$base_cflags;
$lflags.=" /debug";
$mlflags.=' /debug';
}
else
{
$cflags=$opt_cflags.$base_cflags;
}
$obj='.obj';
$ofile="/Fo";
# EXE linking stuff
$link="link";
$rsc="rc";
$efile="/out:";
$exep='.exe';
if ($no_sock) { $ex_libs=''; }
elsif ($FLAVOR =~ /CE/) { $ex_libs='winsock.lib'; }
else { $ex_libs='wsock32.lib'; }
my $oflow;
if ($FLAVOR =~ /WIN64/ and `cl 2>&1` =~ /14\.00\.4[0-9]{4}\./)
{
$oflow=' bufferoverflowu.lib';
}
else
{
$oflow="";
}
if ($FLAVOR =~ /CE/)
{
$ex_libs.=' $(WCECOMPAT)/lib/wcecompatex.lib';
$ex_libs.=' /nodefaultlib:oldnames.lib coredll.lib corelibc.lib' if ($ENV{'TARGETCPU'} eq "X86");
}
else
{
$ex_libs.=' gdi32.lib crypt32.lib advapi32.lib user32.lib';
$ex_libs.= $oflow;
}
# As native NT API is pure UNICODE, our WIN-NT build defaults to UNICODE,
# but gets linked with unicows.lib to ensure backward compatibility.
if ($FLAVOR =~ /NT/)
{
$cflags.=" -DOPENSSL_SYSNAME_WINNT -DUNICODE -D_UNICODE";
$ex_libs="unicows.lib $ex_libs";
}
# static library stuff
$mklib='lib /nologo';
$ranlib='';
$plib="";
$libp=".lib";
$shlibp=($shlib)?".dll":".lib";
$lfile='/out:';
$shlib_ex_obj="";
$app_ex_obj="setargv.obj" if ($FLAVOR !~ /CE/);
if ($nasm) {
my $ver=`nasm -v 2>NUL`;
my $vew=`nasmw -v 2>NUL`;
# pick newest version
$asm=($ver gt $vew?"nasm":"nasmw")." -f win32";
$afile='-o ';
} elsif ($ml64) {
$asm='ml64 /c /Cp /Cx';
$asm.=' /Zi' if $debug;
$afile='/Fo';
} else {
$asm='ml /nologo /Cp /coff /c /Cx';
$asm.=" /Zi" if $debug;
$afile='/Fo';
}
$aes_asm_obj='';
$bn_asm_obj='';
$bn_asm_src='';
$des_enc_obj='';
$des_enc_src='';
$bf_enc_obj='';
$bf_enc_src='';
if (!$no_asm)
{
if ($FLAVOR =~ "WIN32")
{
$aes_asm_obj='crypto\aes\asm\a_win32.obj';
$aes_asm_src='crypto\aes\asm\a_win32.asm';
$bn_asm_obj='crypto\bn\asm\bn_win32.obj crypto\bn\asm\mt_win32.obj';
$bn_asm_src='crypto\bn\asm\bn_win32.asm crypto\bn\asm\mt_win32.asm';
$bnco_asm_obj='crypto\bn\asm\co_win32.obj';
$bnco_asm_src='crypto\bn\asm\co_win32.asm';
$des_enc_obj='crypto\des\asm\d_win32.obj crypto\des\asm\y_win32.obj';
$des_enc_src='crypto\des\asm\d_win32.asm crypto\des\asm\y_win32.asm';
$bf_enc_obj='crypto\bf\asm\b_win32.obj';
$bf_enc_src='crypto\bf\asm\b_win32.asm';
$cast_enc_obj='crypto\cast\asm\c_win32.obj';
$cast_enc_src='crypto\cast\asm\c_win32.asm';
$rc4_enc_obj='crypto\rc4\asm\r4_win32.obj';
$rc4_enc_src='crypto\rc4\asm\r4_win32.asm';
$rc5_enc_obj='crypto\rc5\asm\r5_win32.obj';
$rc5_enc_src='crypto\rc5\asm\r5_win32.asm';
$md5_asm_obj='crypto\md5\asm\m5_win32.obj';
$md5_asm_src='crypto\md5\asm\m5_win32.asm';
$sha1_asm_obj='crypto\sha\asm\s1_win32.obj crypto\sha\asm\sha512-sse2.obj';
$sha1_asm_src='crypto\sha\asm\s1_win32.asm crypto\sha\asm\sha512-sse2.asm';
$rmd160_asm_obj='crypto\ripemd\asm\rm_win32.obj';
$rmd160_asm_src='crypto\ripemd\asm\rm_win32.asm';
$cpuid_asm_obj='crypto\cpu_win32.obj';
$cpuid_asm_src='crypto\cpu_win32.asm';
$cflags.=" -DOPENSSL_CPUID_OBJ -DOPENSSL_IA32_SSE2 -DAES_ASM -DBN_ASM -DOPENSSL_BN_ASM_PART_WORDS -DOPENSSL_BN_ASM_MONT -DMD5_ASM -DSHA1_ASM -DRMD160_ASM";
}
elsif ($FLAVOR =~ "WIN64A")
{
$aes_asm_obj='$(OBJ_D)\aes-x86_64.obj';
$aes_asm_src='crypto\aes\asm\aes-x86_64.asm';
$bn_asm_obj='$(OBJ_D)\x86_64-mont.obj $(OBJ_D)\bn_asm.obj';
$bn_asm_src='crypto\bn\asm\x86_64-mont.asm';
$sha1_asm_obj='$(OBJ_D)\sha1-x86_64.obj $(OBJ_D)\sha256-x86_64.obj $(OBJ_D)\sha512-x86_64.obj';
$sha1_asm_src='crypto\sha\asm\sha1-x86_64.asm crypto\sha\asm\sha256-x86_64.asm crypto\sha\asm\sha512-x86_64.asm';
$cpuid_asm_obj='$(OBJ_D)\cpuid-x86_64.obj';
$cpuid_asm_src='crypto\cpuid-x86_64.asm';
$cflags.=" -DOPENSSL_CPUID_OBJ -DAES_ASM -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DSHA512_ASM";
}
}
if ($shlib && $FLAVOR !~ /CE/)
{
$mlflags.=" $lflags /dll";
# $cflags =~ s| /MD| /MT|;
$lib_cflag=" -D_WINDLL";
$out_def="out32dll";
$tmp_def="tmp32dll";
#
# Engage Applink...
#
$app_ex_obj.=" \$(OBJ_D)\\applink.obj /implib:\$(TMP_D)\\junk.lib";
$cflags.=" -DOPENSSL_USE_APPLINK -I.";
# I'm open for better suggestions than overriding $banner...
$banner=<<'___';
@echo Building OpenSSL
$(OBJ_D)\applink.obj: ms\applink.c
$(CC) /Fo$(OBJ_D)\applink.obj $(APP_CFLAGS) -c ms\applink.c
$(OBJ_D)\uplink.obj: ms\uplink.c ms\applink.c
$(CC) /Fo$(OBJ_D)\uplink.obj $(SHLIB_CFLAGS) -c ms\uplink.c
$(INCO_D)\applink.c: ms\applink.c
$(CP) ms\applink.c $(INCO_D)\applink.c
EXHEADER= $(EXHEADER) $(INCO_D)\applink.c
LIBS_DEP=$(LIBS_DEP) $(OBJ_D)\applink.obj
___
$banner .= "CRYPTOOBJ=\$(OBJ_D)\\uplink.obj \$(CRYPTOOBJ)\n";
$banner.=<<'___' if ($FLAVOR =~ /WIN64/);
CRYPTOOBJ=ms\uptable.obj $(CRYPTOOBJ)
___
}
elsif ($shlib && $FLAVOR =~ /CE/)
{
$mlflags.=" $lflags /dll";
$lib_cflag=" -D_WINDLL -D_DLL";
$out_def='out32dll_$(TARGETCPU)';
$tmp_def='tmp32dll_$(TARGETCPU)';
}
$cflags.=" /Fd$out_def";
sub do_lib_rule
{
my($objs,$target,$name,$shlib,$ign,$base_addr) = @_;
local($ret);
	$target =~ s/\//$o/g if $o ne '/';
my $base_arg;
if ($base_addr ne "")
{
$base_arg= " /base:$base_addr";
}
else
{
$base_arg = "";
}
if ($target =~ /O_CRYPTO/ && $fipsdso)
{
$name = "/def:ms/libeayfips.def";
}
elsif ($name ne "")
{
$name =~ tr/a-z/A-Z/;
$name = "/def:ms/${name}.def";
}
# $target="\$(LIB_D)$o$target";
# $ret.="$target: $objs\n";
if (!$shlib)
{
# $ret.="\t\$(RM) \$(O_$Name)\n";
$ex =' ';
$ret.="$target: $objs\n";
$ret.="\t\$(MKLIB) $lfile$target @<<\n $objs $ex\n<<\n";
}
else
{
my $ex = "";
if ($target =~ /O_SSL/)
{
$ex .= " \$(L_CRYPTO)";
#$ex .= " \$(L_FIPS)" if $fipsdso;
}
my $fipstarget;
if ($fipsdso)
{
$fipstarget = "O_FIPS";
}
else
{
$fipstarget = "O_CRYPTO";
}
if ($name eq "")
{
$ex.= $oflow;
if ($target =~ /capi/)
{
$ex.=' crypt32.lib advapi32.lib';
}
}
elsif ($FLAVOR =~ /CE/)
{
$ex.=' winsock.lib $(WCECOMPAT)/lib/wcecompatex.lib';
}
else
{
$ex.=' unicows.lib' if ($FLAVOR =~ /NT/);
$ex.=' wsock32.lib gdi32.lib advapi32.lib user32.lib';
$ex.=' crypt32.lib';
$ex.= $oflow;
}
$ex.=" $zlib_lib" if $zlib_opt == 1 && $target =~ /O_CRYPTO/;
if ($fips && $target =~ /$fipstarget/)
{
$ex.= $mwex unless $fipscanisterbuild;
$ret.="$target: $objs \$(PREMAIN_DSO_EXE)";
if ($fipsdso)
{
$ex.=" \$(OBJ_D)\\\$(LIBFIPS).res";
$ret.=" \$(OBJ_D)\\\$(LIBFIPS).res";
$ret.=" ms/\$(LIBFIPS).def";
}
$ret.="\n\tSET FIPS_LINK=\$(LINK)\n";
$ret.="\tSET FIPS_CC=\$(CC)\n";
$ret.="\tSET FIPS_CC_ARGS=/Fo\$(OBJ_D)${o}fips_premain.obj \$(SHLIB_CFLAGS) -c\n";
$ret.="\tSET PREMAIN_DSO_EXE=\$(PREMAIN_DSO_EXE)\n";
$ret.="\tSET FIPS_SHA1_EXE=\$(FIPS_SHA1_EXE)\n";
$ret.="\tSET FIPS_TARGET=$target\n";
$ret.="\tSET FIPSLIB_D=\$(FIPSLIB_D)\n";
$ret.="\t\$(FIPSLINK) \$(MLFLAGS) /map $base_arg $efile$target ";
$ret.="$name @<<\n \$(SHLIB_EX_OBJ) $objs ";
$ret.="\$(OBJ_D)${o}fips_premain.obj $ex\n<<\n";
}
else
{
$ret.="$target: $objs";
if ($target =~ /O_CRYPTO/ && $fipsdso)
{
$ret .= " \$(O_FIPS)";
$ex .= " \$(L_FIPS)";
}
$ret.="\n\t\$(LINK) \$(MLFLAGS) $efile$target $name @<<\n \$(SHLIB_EX_OBJ) $objs $ex\n<<\n";
}
$ret.="\tIF EXIST \[email protected] mt -nologo -manifest \[email protected] -outputresource:\$@;2\n\n";
}
$ret.="\n";
return($ret);
}
sub do_link_rule
{
my($target,$files,$dep_libs,$libs,$standalone)=@_;
local($ret,$_);
	$files =~ s/\//$o/g if $o ne '/';
	$n=&bname($target);
$ret.="$target: $files $dep_libs\n";
if ($standalone == 1)
{
$ret.=" \$(LINK) \$(LFLAGS) $efile$target @<<\n\t";
$ret.= "\$(EX_LIBS) " if ($files =~ /O_FIPSCANISTER/ && !$fipscanisterbuild);
$ret.="$files $libs\n<<\n";
}
elsif ($standalone == 2)
{
$ret.="\tSET FIPS_LINK=\$(LINK)\n";
$ret.="\tSET FIPS_CC=\$(CC)\n";
$ret.="\tSET FIPS_CC_ARGS=/Fo\$(OBJ_D)${o}fips_premain.obj \$(SHLIB_CFLAGS) -c\n";
$ret.="\tSET PREMAIN_DSO_EXE=\n";
$ret.="\tSET FIPS_TARGET=$target\n";
$ret.="\tSET FIPS_SHA1_EXE=\$(FIPS_SHA1_EXE)\n";
$ret.="\tSET FIPSLIB_D=\$(FIPSLIB_D)\n";
$ret.="\t\$(FIPSLINK) \$(LFLAGS) /map $efile$target @<<\n";
$ret.="\t\$(APP_EX_OBJ) $files \$(OBJ_D)${o}fips_premain.obj $libs\n<<\n";
}
else
{
$ret.="\t\$(LINK) \$(LFLAGS) $efile$target @<<\n";
$ret.="\t\$(APP_EX_OBJ) $files $libs\n<<\n";
}
$ret.="\tIF EXIST \[email protected] mt -nologo -manifest \[email protected] -outputresource:\$@;1\n\n";
return($ret);
}
sub do_rlink_rule
{
local($target,$rl_start, $rl_mid, $rl_end,$dep_libs,$libs)=@_;
local($ret,$_);
my $files = "$rl_start $rl_mid $rl_end";
	$files =~ s/\//$o/g if $o ne '/';
	$n=&bname($target);
$ret.="$target: $files $dep_libs \$(FIPS_SHA1_EXE)\n";
$ret.="\t\$(PERL) ms\\segrenam.pl \$\$a $rl_start\n";
$ret.="\t\$(PERL) ms\\segrenam.pl \$\$b $rl_mid\n";
$ret.="\t\$(PERL) ms\\segrenam.pl \$\$c $rl_end\n";
$ret.="\t\$(MKLIB) $lfile$target @<<\n\t$files\n<<\n";
$ret.="\t\$(FIPS_SHA1_EXE) $target > ${target}.sha1\n";
$ret.="\t\$(PERL) util${o}copy.pl -stripcr fips${o}fips_premain.c \$(LIB_D)${o}fips_premain.c\n";
$ret.="\t\$(CP) fips${o}fips_premain.c.sha1 \$(LIB_D)${o}fips_premain.c.sha1\n";
$ret.="\n";
return($ret);
}
sub do_sdef_rule
{
my $ret = "ms/\$(LIBFIPS).def: \$(O_FIPSCANISTER)\n";
$ret.="\t\$(PERL) util/mksdef.pl \$(MLFLAGS) /out:dummy.dll /def:ms/libeay32.def @<<\n \$(O_FIPSCANISTER)\n<<\n";
$ret.="\n";
return $ret;
}
1;
| 29.91841 | 194 | 0.609398 |
ed9e2346fd9fe0d502e11aea2efbc4ad99fce3d1 | 6,255 | pl | Perl | external/win_perl/lib/auto/share/dist/DateTime-Locale/vai-Vaii.pl | phixion/l0phtcrack | 48ee2f711134e178dbedbd925640f6b3b663fbb5 | [
"Apache-2.0",
"MIT"
] | 2 | 2021-10-20T00:25:39.000Z | 2021-11-08T12:52:42.000Z | external/win_perl/lib/auto/share/dist/DateTime-Locale/vai-Vaii.pl | Brute-f0rce/l0phtcrack | 25f681c07828e5e68e0dd788d84cc13c154aed3d | [
"Apache-2.0",
"MIT"
] | null | null | null | external/win_perl/lib/auto/share/dist/DateTime-Locale/vai-Vaii.pl | Brute-f0rce/l0phtcrack | 25f681c07828e5e68e0dd788d84cc13c154aed3d | [
"Apache-2.0",
"MIT"
] | 1 | 2022-03-14T06:41:16.000Z | 2022-03-14T06:41:16.000Z | {
am_pm_abbreviated => [
"AM",
"PM",
],
available_formats => {
Bh => "h B",
Bhm => "h:mm B",
Bhms => "h:mm:ss B",
E => "ccc",
EBhm => "E h:mm B",
EBhms => "E h:mm:ss B",
EHm => "E HH:mm",
EHms => "E HH:mm:ss",
Ed => "d, E",
Ehm => "E h:mm a",
Ehms => "E h:mm:ss a",
Gy => "G y",
GyMMM => "G y MMM",
GyMMMEd => "G y MMM d, E",
GyMMMd => "G y MMM d",
H => "HH",
Hm => "HH:mm",
Hms => "HH:mm:ss",
Hmsv => "HH:mm:ss v",
Hmv => "HH:mm v",
M => "L",
MEd => "E, M/d",
MMM => "LLL",
MMMEd => "E, MMM d",
MMMMEd => "E, MMMM d",
"MMMMW-count-other" => "'week' W 'of' MMMM",
MMMMd => "MMMM d",
MMMd => "MMM d",
Md => "M/d",
d => "d",
h => "h a",
hm => "h:mm a",
hms => "h:mm:ss a",
hmsv => "h:mm:ss a v",
hmv => "h:mm a v",
ms => "mm:ss",
y => "y",
yM => "M/y",
yMEd => "E, M/d/y",
yMMM => "MMM y",
yMMMEd => "E, MMM d, y",
yMMMM => "MMMM y",
yMMMd => "y MMM d",
yMd => "y-MM-dd",
yQQQ => "QQQ y",
yQQQQ => "QQQQ y",
"yw-count-other" => "'week' w 'of' Y",
},
code => "vai-Vaii",
date_format_full => "EEEE, d MMMM y",
date_format_long => "d MMMM y",
date_format_medium => "d MMM y",
date_format_short => "dd/MM/y",
datetime_format_full => "{1} {0}",
datetime_format_long => "{1} {0}",
datetime_format_medium => "{1} {0}",
datetime_format_short => "{1} {0}",
day_format_abbreviated => [
"\N{U+a5f3}\N{U+a5e1}\N{U+a609}",
"\N{U+a55a}\N{U+a55e}\N{U+a55a}",
"\N{U+a549}\N{U+a55e}\N{U+a552}",
"\N{U+a549}\N{U+a524}\N{U+a546}\N{U+a562}",
"\N{U+a549}\N{U+a524}\N{U+a540}\N{U+a56e}",
"\N{U+a53b}\N{U+a52c}\N{U+a533}",
"\N{U+a55e}\N{U+a54c}\N{U+a535}",
],
day_format_narrow => [
"M",
"T",
"W",
"T",
"F",
"S",
"S",
],
day_format_wide => [
"\N{U+a5f3}\N{U+a5e1}\N{U+a609}",
"\N{U+a55a}\N{U+a55e}\N{U+a55a}",
"\N{U+a549}\N{U+a55e}\N{U+a552}",
"\N{U+a549}\N{U+a524}\N{U+a546}\N{U+a562}",
"\N{U+a549}\N{U+a524}\N{U+a540}\N{U+a56e}",
"\N{U+a53b}\N{U+a52c}\N{U+a533}",
"\N{U+a55e}\N{U+a54c}\N{U+a535}",
],
day_stand_alone_abbreviated => [
"\N{U+a5f3}\N{U+a5e1}\N{U+a609}",
"\N{U+a55a}\N{U+a55e}\N{U+a55a}",
"\N{U+a549}\N{U+a55e}\N{U+a552}",
"\N{U+a549}\N{U+a524}\N{U+a546}\N{U+a562}",
"\N{U+a549}\N{U+a524}\N{U+a540}\N{U+a56e}",
"\N{U+a53b}\N{U+a52c}\N{U+a533}",
"\N{U+a55e}\N{U+a54c}\N{U+a535}",
],
day_stand_alone_narrow => [
"M",
"T",
"W",
"T",
"F",
"S",
"S",
],
day_stand_alone_wide => [
"\N{U+a5f3}\N{U+a5e1}\N{U+a609}",
"\N{U+a55a}\N{U+a55e}\N{U+a55a}",
"\N{U+a549}\N{U+a55e}\N{U+a552}",
"\N{U+a549}\N{U+a524}\N{U+a546}\N{U+a562}",
"\N{U+a549}\N{U+a524}\N{U+a540}\N{U+a56e}",
"\N{U+a53b}\N{U+a52c}\N{U+a533}",
"\N{U+a55e}\N{U+a54c}\N{U+a535}",
],
era_abbreviated => [
"BCE",
"CE",
],
era_narrow => [
"BCE",
"CE",
],
era_wide => [
"BCE",
"CE",
],
first_day_of_week => 1,
glibc_date_1_format => "%a %b %e %H:%M:%S %Z %Y",
glibc_date_format => "%m/%d/%y",
glibc_datetime_format => "%a %b %e %H:%M:%S %Y",
glibc_time_12_format => "%I:%M:%S %p",
glibc_time_format => "%H:%M:%S",
language => "Vai",
month_format_abbreviated => [
"\N{U+a5a8}\N{U+a56a}\N{U+a583}",
"\N{U+a552}\N{U+a561}",
"\N{U+a57e}\N{U+a5ba}",
"\N{U+a5a2}\N{U+a595}",
"\N{U+a591}\N{U+a571}",
"\N{U+a5b1}\N{U+a60b}",
"\N{U+a5b1}\N{U+a55e}",
"\N{U+a5db}\N{U+a515}",
"\N{U+a562}\N{U+a54c}",
"\N{U+a56d}\N{U+a583}",
"\N{U+a51e}\N{U+a60b}",
"\N{U+a5a8}\N{U+a56a}\N{U+a571}",
],
month_format_narrow => [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
],
month_format_wide => [
"\N{U+a5a8}\N{U+a56a}\N{U+a583} \N{U+a51e}\N{U+a56e}",
"\N{U+a552}\N{U+a561}\N{U+a59d}\N{U+a595}",
"\N{U+a57e}\N{U+a5ba}",
"\N{U+a5a2}\N{U+a595}",
"\N{U+a591}\N{U+a571}",
"\N{U+a5b1}\N{U+a60b}",
"\N{U+a5b1}\N{U+a55e}\N{U+a524}",
"\N{U+a5db}\N{U+a515}",
"\N{U+a562}\N{U+a54c}",
"\N{U+a56d}\N{U+a583}",
"\N{U+a51e}\N{U+a60b}\N{U+a554}\N{U+a57f} \N{U+a578}\N{U+a583}\N{U+a5cf}",
"\N{U+a5a8}\N{U+a56a}\N{U+a571} \N{U+a5cf}\N{U+a56e}",
],
month_stand_alone_abbreviated => [
"\N{U+a5a8}\N{U+a56a}\N{U+a583}",
"\N{U+a552}\N{U+a561}",
"\N{U+a57e}\N{U+a5ba}",
"\N{U+a5a2}\N{U+a595}",
"\N{U+a591}\N{U+a571}",
"\N{U+a5b1}\N{U+a60b}",
"\N{U+a5b1}\N{U+a55e}",
"\N{U+a5db}\N{U+a515}",
"\N{U+a562}\N{U+a54c}",
"\N{U+a56d}\N{U+a583}",
"\N{U+a51e}\N{U+a60b}",
"\N{U+a5a8}\N{U+a56a}\N{U+a571}",
],
month_stand_alone_narrow => [
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
],
month_stand_alone_wide => [
"\N{U+a5a8}\N{U+a56a}\N{U+a583} \N{U+a51e}\N{U+a56e}",
"\N{U+a552}\N{U+a561}\N{U+a59d}\N{U+a595}",
"\N{U+a57e}\N{U+a5ba}",
"\N{U+a5a2}\N{U+a595}",
"\N{U+a591}\N{U+a571}",
"\N{U+a5b1}\N{U+a60b}",
"\N{U+a5b1}\N{U+a55e}\N{U+a524}",
"\N{U+a5db}\N{U+a515}",
"\N{U+a562}\N{U+a54c}",
"\N{U+a56d}\N{U+a583}",
"\N{U+a51e}\N{U+a60b}\N{U+a554}\N{U+a57f} \N{U+a578}\N{U+a583}\N{U+a5cf}",
"\N{U+a5a8}\N{U+a56a}\N{U+a571} \N{U+a5cf}\N{U+a56e}",
],
name => "Vai Vai",
native_language => "\N{U+a559}\N{U+a524}",
native_name => "\N{U+a559}\N{U+a524} Vaii",
native_script => "Vaii",
native_territory => undef,
native_variant => undef,
quarter_format_abbreviated => [
"Q1",
"Q2",
"Q3",
"Q4",
],
quarter_format_narrow => [
1,
2,
3,
4,
],
quarter_format_wide => [
"Q1",
"Q2",
"Q3",
"Q4",
],
quarter_stand_alone_abbreviated => [
"Q1",
"Q2",
"Q3",
"Q4",
],
quarter_stand_alone_narrow => [
1,
2,
3,
4,
],
quarter_stand_alone_wide => [
"Q1",
"Q2",
"Q3",
"Q4",
],
script => "Vai",
territory => undef,
time_format_full => "h:mm:ss a zzzz",
time_format_long => "h:mm:ss a z",
time_format_medium => "h:mm:ss a",
time_format_short => "h:mm a",
variant => undef,
version => 32,
}
| 22.996324 | 78 | 0.46219 |
ed9018af51135b5ee62b8f4b48754c6fa42a2128 | 56,087 | pl | Perl | crypto/ec/asm/ecp_nistz256-avx2.pl | jeapi-b/openssl | a8f730d5d240abe56b1d14c18223db3fd20f09e2 | [
"OpenSSL"
] | 1 | 2017-12-04T12:08:18.000Z | 2017-12-04T12:08:18.000Z | crypto/ec/asm/ecp_nistz256-avx2.pl | jeapi-b/openssl | a8f730d5d240abe56b1d14c18223db3fd20f09e2 | [
"OpenSSL"
] | null | null | null | crypto/ec/asm/ecp_nistz256-avx2.pl | jeapi-b/openssl | a8f730d5d240abe56b1d14c18223db3fd20f09e2 | [
"OpenSSL"
] | 1 | 2019-07-07T15:59:16.000Z | 2019-07-07T15:59:16.000Z | #! /usr/bin/env perl
# Copyright 2014-2016 The OpenSSL Project Authors. All Rights Reserved.
# Copyright (c) 2014, Intel Corporation. All Rights Reserved.
#
# Licensed under the OpenSSL license (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.openssl.org/source/license.html
#
# Originally written by Shay Gueron (1, 2), and Vlad Krasnov (1)
# (1) Intel Corporation, Israel Development Center, Haifa, Israel
# (2) University of Haifa, Israel
#
# Reference:
# S.Gueron and V.Krasnov, "Fast Prime Field Elliptic Curve Cryptography with
# 256 Bit Primes"
$flavour = shift;
$output = shift;
if ($flavour =~ /\./) { $output = $flavour; undef $flavour; }
$win64=0; $win64=1 if ($flavour =~ /[nm]asm|mingw64/ || $output =~ /\.asm$/);
$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
( $xlate="${dir}x86_64-xlate.pl" and -f $xlate ) or
( $xlate="${dir}../../perlasm/x86_64-xlate.pl" and -f $xlate) or
die "can't locate x86_64-xlate.pl";
open OUT,"| \"$^X\" $xlate $flavour $output";
*STDOUT=*OUT;
if (`$ENV{CC} -Wa,-v -c -o /dev/null -x assembler /dev/null 2>&1`
=~ /GNU assembler version ([2-9]\.[0-9]+)/) {
$avx = ($1>=2.19) + ($1>=2.22);
$addx = ($1>=2.23);
}
if (!$addx && $win64 && ($flavour =~ /nasm/ || $ENV{ASM} =~ /nasm/) &&
`nasm -v 2>&1` =~ /NASM version ([2-9]\.[0-9]+)/) {
$avx = ($1>=2.09) + ($1>=2.10);
$addx = ($1>=2.10);
}
if (!$addx && $win64 && ($flavour =~ /masm/ || $ENV{ASM} =~ /ml64/) &&
`ml64 2>&1` =~ /Version ([0-9]+)\./) {
$avx = ($1>=10) + ($1>=11);
$addx = ($1>=12);
}
if (!$addx && `$ENV{CC} -v 2>&1` =~ /(^clang version|based on LLVM) ([3-9])\.([0-9]+)/) {
my $ver = $2 + $3/100.0; # 3.1->3.01, 3.10->3.10
$avx = ($ver>=3.0) + ($ver>=3.01);
$addx = ($ver>=3.03);
}
if ($avx>=2) {{
$digit_size = "\$29";
$n_digits = "\$9";
$code.=<<___;
.text
.align 64
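# The tables below hold per-lane constants for the 4-way interleaved
# representation: each field element is nine 29-bit digits, and every digit
# is replicated four times (one copy per point lane).  .LAVX2_AND_MASK is
# the digit mask 2^29-1; .LAVX2_POLY is the P-256 prime
# p = 2^256 - 2^224 + 2^192 + 2^96 - 1 in that form, and the _x2/_x8
# variants appear to be multiples of p with the digits padded so they can
# be added before a subtraction without any limb going negative.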
.LAVX2_AND_MASK:
.LAVX2_POLY:
.quad 0x1fffffff, 0x1fffffff, 0x1fffffff, 0x1fffffff
.quad 0x1fffffff, 0x1fffffff, 0x1fffffff, 0x1fffffff
.quad 0x1fffffff, 0x1fffffff, 0x1fffffff, 0x1fffffff
.quad 0x000001ff, 0x000001ff, 0x000001ff, 0x000001ff
.quad 0x00000000, 0x00000000, 0x00000000, 0x00000000
.quad 0x00000000, 0x00000000, 0x00000000, 0x00000000
.quad 0x00040000, 0x00040000, 0x00040000, 0x00040000
.quad 0x1fe00000, 0x1fe00000, 0x1fe00000, 0x1fe00000
.quad 0x00ffffff, 0x00ffffff, 0x00ffffff, 0x00ffffff
.LAVX2_POLY_x2:
.quad 0x7FFFFFFC, 0x7FFFFFFC, 0x7FFFFFFC, 0x7FFFFFFC
.quad 0x7FFFFFFC, 0x7FFFFFFC, 0x7FFFFFFC, 0x7FFFFFFC
.quad 0x7FFFFFFC, 0x7FFFFFFC, 0x7FFFFFFC, 0x7FFFFFFC
.quad 0x400007FC, 0x400007FC, 0x400007FC, 0x400007FC
.quad 0x3FFFFFFE, 0x3FFFFFFE, 0x3FFFFFFE, 0x3FFFFFFE
.quad 0x3FFFFFFE, 0x3FFFFFFE, 0x3FFFFFFE, 0x3FFFFFFE
.quad 0x400FFFFE, 0x400FFFFE, 0x400FFFFE, 0x400FFFFE
.quad 0x7F7FFFFE, 0x7F7FFFFE, 0x7F7FFFFE, 0x7F7FFFFE
.quad 0x03FFFFFC, 0x03FFFFFC, 0x03FFFFFC, 0x03FFFFFC
.LAVX2_POLY_x8:
.quad 0xFFFFFFF8, 0xFFFFFFF8, 0xFFFFFFF8, 0xFFFFFFF8
.quad 0xFFFFFFF8, 0xFFFFFFF8, 0xFFFFFFF8, 0xFFFFFFF8
.quad 0xFFFFFFF8, 0xFFFFFFF8, 0xFFFFFFF8, 0xFFFFFFF8
.quad 0x80000FF8, 0x80000FF8, 0x80000FF8, 0x80000FF8
.quad 0x7FFFFFFC, 0x7FFFFFFC, 0x7FFFFFFC, 0x7FFFFFFC
.quad 0x7FFFFFFC, 0x7FFFFFFC, 0x7FFFFFFC, 0x7FFFFFFC
.quad 0x801FFFFC, 0x801FFFFC, 0x801FFFFC, 0x801FFFFC
.quad 0xFEFFFFFC, 0xFEFFFFFC, 0xFEFFFFFC, 0xFEFFFFFC
.quad 0x07FFFFF8, 0x07FFFFF8, 0x07FFFFF8, 0x07FFFFF8
.LONE:
.quad 0x00000020, 0x00000020, 0x00000020, 0x00000020
.quad 0x00000000, 0x00000000, 0x00000000, 0x00000000
.quad 0x00000000, 0x00000000, 0x00000000, 0x00000000
.quad 0x1fffc000, 0x1fffc000, 0x1fffc000, 0x1fffc000
.quad 0x1fffffff, 0x1fffffff, 0x1fffffff, 0x1fffffff
.quad 0x1fffffff, 0x1fffffff, 0x1fffffff, 0x1fffffff
.quad 0x1f7fffff, 0x1f7fffff, 0x1f7fffff, 0x1f7fffff
.quad 0x03ffffff, 0x03ffffff, 0x03ffffff, 0x03ffffff
.quad 0x00000000, 0x00000000, 0x00000000, 0x00000000
# RR = 2^266 mod p in AVX2 format, to transform from the native OpenSSL
# Montgomery form (*2^256) to our format (*2^261)
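# (As a sanity check, assuming the digit-serial reduction below divides by
# 2^29 nine times, i.e. Montgomery radix R = 2^261: converting a*2^256 into
# a*2^261 with one Montgomery multiplication needs
# a*2^256 * RR / 2^261 == a*2^261, hence RR == 2^266 mod p as stated above.)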
.LTO_MONT_AVX2:
.quad 0x00000400, 0x00000400, 0x00000400, 0x00000400
.quad 0x00000000, 0x00000000, 0x00000000, 0x00000000
.quad 0x00000000, 0x00000000, 0x00000000, 0x00000000
.quad 0x1ff80000, 0x1ff80000, 0x1ff80000, 0x1ff80000
.quad 0x1fffffff, 0x1fffffff, 0x1fffffff, 0x1fffffff
.quad 0x1fffffff, 0x1fffffff, 0x1fffffff, 0x1fffffff
.quad 0x0fffffff, 0x0fffffff, 0x0fffffff, 0x0fffffff
.quad 0x1fffffff, 0x1fffffff, 0x1fffffff, 0x1fffffff
.quad 0x00000003, 0x00000003, 0x00000003, 0x00000003
.LFROM_MONT_AVX2:
.quad 0x00000001, 0x00000001, 0x00000001, 0x00000001
.quad 0x00000000, 0x00000000, 0x00000000, 0x00000000
.quad 0x00000000, 0x00000000, 0x00000000, 0x00000000
.quad 0x1ffffe00, 0x1ffffe00, 0x1ffffe00, 0x1ffffe00
.quad 0x1fffffff, 0x1fffffff, 0x1fffffff, 0x1fffffff
.quad 0x1fffffff, 0x1fffffff, 0x1fffffff, 0x1fffffff
.quad 0x1ffbffff, 0x1ffbffff, 0x1ffbffff, 0x1ffbffff
.quad 0x001fffff, 0x001fffff, 0x001fffff, 0x001fffff
.quad 0x00000000, 0x00000000, 0x00000000, 0x00000000
.LIntOne:
.long 1,1,1,1,1,1,1,1
___
{
# This function receives a pointer to an array of four affine points
# (X, Y, <1>) and rearranges the data for AVX2 execution, while
# converting it to 2^29 radix redundant form.
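# As an illustrative scalar model of the split performed below (comments
# only; mask = 2^29-1, matching the vpsrlq/vpsllq/vpand sequence):
#   out[0] =   in[0]                        & mask   # bits   0..28
#   out[1] =  (in[0] >> 29)                 & mask   # bits  29..57
#   out[2] = ((in[0] >> 58)^(in[1] <<  6))  & mask   # bits  58..86
#   out[3] =  (in[1] >> 23)                 & mask   # bits  87..115
#   out[4] = ((in[1] >> 52)^(in[2] << 12))  & mask   # bits 116..144
#   out[5] =  (in[2] >> 17)                 & mask   # bits 145..173
#   out[6] = ((in[2] >> 46)^(in[3] << 18))  & mask   # bits 174..202
#   out[7] =  (in[3] >> 11)                 & mask   # bits 203..231
#   out[8] =   in[3] >> 40                           # bits 232..255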
my ($X0,$X1,$X2,$X3, $Y0,$Y1,$Y2,$Y3,
$T0,$T1,$T2,$T3, $T4,$T5,$T6,$T7)=map("%ymm$_",(0..15));
$code.=<<___;
.globl ecp_nistz256_avx2_transpose_convert
.type ecp_nistz256_avx2_transpose_convert,\@function,2
.align 64
ecp_nistz256_avx2_transpose_convert:
vzeroupper
___
$code.=<<___ if ($win64);
lea -8-16*10(%rsp), %rsp
vmovaps %xmm6, -8-16*10(%rax)
vmovaps %xmm7, -8-16*9(%rax)
vmovaps %xmm8, -8-16*8(%rax)
vmovaps %xmm9, -8-16*7(%rax)
vmovaps %xmm10, -8-16*6(%rax)
vmovaps %xmm11, -8-16*5(%rax)
vmovaps %xmm12, -8-16*4(%rax)
vmovaps %xmm13, -8-16*3(%rax)
vmovaps %xmm14, -8-16*2(%rax)
vmovaps %xmm15, -8-16*1(%rax)
___
$code.=<<___;
# Load the data
vmovdqa 32*0(%rsi), $X0
lea 112(%rsi), %rax # size optimization
vmovdqa 32*1(%rsi), $Y0
lea .LAVX2_AND_MASK(%rip), %rdx
vmovdqa 32*2(%rsi), $X1
vmovdqa 32*3(%rsi), $Y1
vmovdqa 32*4-112(%rax), $X2
vmovdqa 32*5-112(%rax), $Y2
vmovdqa 32*6-112(%rax), $X3
vmovdqa 32*7-112(%rax), $Y3
# Transpose X and Y independently
vpunpcklqdq $X1, $X0, $T0 # T0 = [B2 A2 B0 A0]
vpunpcklqdq $X3, $X2, $T1 # T1 = [D2 C2 D0 C0]
vpunpckhqdq $X1, $X0, $T2 # T2 = [B3 A3 B1 A1]
vpunpckhqdq $X3, $X2, $T3 # T3 = [D3 C3 D1 C1]
vpunpcklqdq $Y1, $Y0, $T4
vpunpcklqdq $Y3, $Y2, $T5
vpunpckhqdq $Y1, $Y0, $T6
vpunpckhqdq $Y3, $Y2, $T7
vperm2i128 \$0x20, $T1, $T0, $X0 # X0 = [D0 C0 B0 A0]
vperm2i128 \$0x20, $T3, $T2, $X1 # X1 = [D1 C1 B1 A1]
vperm2i128 \$0x31, $T1, $T0, $X2 # X2 = [D2 C2 B2 A2]
vperm2i128 \$0x31, $T3, $T2, $X3 # X3 = [D3 C3 B3 A3]
vperm2i128 \$0x20, $T5, $T4, $Y0
vperm2i128 \$0x20, $T7, $T6, $Y1
vperm2i128 \$0x31, $T5, $T4, $Y2
vperm2i128 \$0x31, $T7, $T6, $Y3
vmovdqa (%rdx), $T7
vpand (%rdx), $X0, $T0 # out[0] = in[0] & mask;
vpsrlq \$29, $X0, $X0
vpand $T7, $X0, $T1 # out[1] = (in[0] >> shift) & mask;
vpsrlq \$29, $X0, $X0
vpsllq \$6, $X1, $T2
vpxor $X0, $T2, $T2
vpand $T7, $T2, $T2 # out[2] = ((in[0] >> (shift*2)) ^ (in[1] << (64-shift*2))) & mask;
vpsrlq \$23, $X1, $X1
vpand $T7, $X1, $T3 # out[3] = (in[1] >> ((shift*3)%64)) & mask;
vpsrlq \$29, $X1, $X1
vpsllq \$12, $X2, $T4
vpxor $X1, $T4, $T4
vpand $T7, $T4, $T4 # out[4] = ((in[1] >> ((shift*4)%64)) ^ (in[2] << (64*2-shift*4))) & mask;
vpsrlq \$17, $X2, $X2
vpand $T7, $X2, $T5 # out[5] = (in[2] >> ((shift*5)%64)) & mask;
vpsrlq \$29, $X2, $X2
vpsllq \$18, $X3, $T6
vpxor $X2, $T6, $T6
vpand $T7, $T6, $T6 # out[6] = ((in[2] >> ((shift*6)%64)) ^ (in[3] << (64*3-shift*6))) & mask;
vpsrlq \$11, $X3, $X3
vmovdqa $T0, 32*0(%rdi)
lea 112(%rdi), %rax # size optimization
vpand $T7, $X3, $T0 # out[7] = (in[3] >> ((shift*7)%64)) & mask;
vpsrlq \$29, $X3, $X3 # out[8] = (in[3] >> ((shift*8)%64)) & mask;
vmovdqa $T1, 32*1(%rdi)
vmovdqa $T2, 32*2(%rdi)
vmovdqa $T3, 32*3(%rdi)
vmovdqa $T4, 32*4-112(%rax)
vmovdqa $T5, 32*5-112(%rax)
vmovdqa $T6, 32*6-112(%rax)
vmovdqa $T0, 32*7-112(%rax)
vmovdqa $X3, 32*8-112(%rax)
lea 448(%rdi), %rax # size optimization
vpand $T7, $Y0, $T0 # out[0] = in[0] & mask;
vpsrlq \$29, $Y0, $Y0
vpand $T7, $Y0, $T1 # out[1] = (in[0] >> shift) & mask;
vpsrlq \$29, $Y0, $Y0
vpsllq \$6, $Y1, $T2
vpxor $Y0, $T2, $T2
vpand $T7, $T2, $T2 # out[2] = ((in[0] >> (shift*2)) ^ (in[1] << (64-shift*2))) & mask;
vpsrlq \$23, $Y1, $Y1
vpand $T7, $Y1, $T3 # out[3] = (in[1] >> ((shift*3)%64)) & mask;
vpsrlq \$29, $Y1, $Y1
vpsllq \$12, $Y2, $T4
vpxor $Y1, $T4, $T4
vpand $T7, $T4, $T4 # out[4] = ((in[1] >> ((shift*4)%64)) ^ (in[2] << (64*2-shift*4))) & mask;
vpsrlq \$17, $Y2, $Y2
vpand $T7, $Y2, $T5 # out[5] = (in[2] >> ((shift*5)%64)) & mask;
vpsrlq \$29, $Y2, $Y2
vpsllq \$18, $Y3, $T6
vpxor $Y2, $T6, $T6
vpand $T7, $T6, $T6 # out[6] = ((in[2] >> ((shift*6)%64)) ^ (in[3] << (64*3-shift*6))) & mask;
vpsrlq \$11, $Y3, $Y3
vmovdqa $T0, 32*9-448(%rax)
vpand $T7, $Y3, $T0 # out[7] = (in[3] >> ((shift*7)%64)) & mask;
vpsrlq \$29, $Y3, $Y3 # out[8] = (in[3] >> ((shift*8)%64)) & mask;
vmovdqa $T1, 32*10-448(%rax)
vmovdqa $T2, 32*11-448(%rax)
vmovdqa $T3, 32*12-448(%rax)
vmovdqa $T4, 32*13-448(%rax)
vmovdqa $T5, 32*14-448(%rax)
vmovdqa $T6, 32*15-448(%rax)
vmovdqa $T0, 32*16-448(%rax)
vmovdqa $Y3, 32*17-448(%rax)
vzeroupper
___
$code.=<<___ if ($win64);
movaps 16*0(%rsp), %xmm6
movaps 16*1(%rsp), %xmm7
movaps 16*2(%rsp), %xmm8
movaps 16*3(%rsp), %xmm9
movaps 16*4(%rsp), %xmm10
movaps 16*5(%rsp), %xmm11
movaps 16*6(%rsp), %xmm12
movaps 16*7(%rsp), %xmm13
movaps 16*8(%rsp), %xmm14
movaps 16*9(%rsp), %xmm15
lea 8+16*10(%rsp), %rsp
___
$code.=<<___;
ret
.size ecp_nistz256_avx2_transpose_convert,.-ecp_nistz256_avx2_transpose_convert
___
}
{
################################################################################
# This function receives a pointer to an array of four AVX2-formatted points
# (X, Y, Z), converts the data to normal representation, and rearranges it.
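# As an illustrative scalar model of the recombination performed below
# (comments only): the nine 29-bit digits d[0..8] of each coordinate are
# packed back into four 64-bit words,
#   out[0] =  d[0]        + (d[1] << 29) + (d[2] << 58)
#   out[1] = (d[2] >>  6) + (d[3] << 23) + (d[4] << 52)
#   out[2] = (d[4] >> 12) + (d[5] << 17) + (d[6] << 46)
#   out[3] = (d[6] >> 18) + (d[7] << 11) + (d[8] << 40)
# before the 4x4 transpose that restores one point per 256-bit row.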
my ($D0,$D1,$D2,$D3, $D4,$D5,$D6,$D7, $D8)=map("%ymm$_",(0..8));
my ($T0,$T1,$T2,$T3, $T4,$T5,$T6)=map("%ymm$_",(9..15));
$code.=<<___;
.globl ecp_nistz256_avx2_convert_transpose_back
.type ecp_nistz256_avx2_convert_transpose_back,\@function,2
.align 32
ecp_nistz256_avx2_convert_transpose_back:
vzeroupper
___
$code.=<<___ if ($win64);
lea -8-16*10(%rsp), %rsp
vmovaps %xmm6, -8-16*10(%rax)
vmovaps %xmm7, -8-16*9(%rax)
vmovaps %xmm8, -8-16*8(%rax)
vmovaps %xmm9, -8-16*7(%rax)
vmovaps %xmm10, -8-16*6(%rax)
vmovaps %xmm11, -8-16*5(%rax)
vmovaps %xmm12, -8-16*4(%rax)
vmovaps %xmm13, -8-16*3(%rax)
vmovaps %xmm14, -8-16*2(%rax)
vmovaps %xmm15, -8-16*1(%rax)
___
$code.=<<___;
mov \$3, %ecx
.Lconv_loop:
vmovdqa 32*0(%rsi), $D0
lea 160(%rsi), %rax # size optimization
vmovdqa 32*1(%rsi), $D1
vmovdqa 32*2(%rsi), $D2
vmovdqa 32*3(%rsi), $D3
vmovdqa 32*4-160(%rax), $D4
vmovdqa 32*5-160(%rax), $D5
vmovdqa 32*6-160(%rax), $D6
vmovdqa 32*7-160(%rax), $D7
vmovdqa 32*8-160(%rax), $D8
vpsllq \$29, $D1, $D1
vpsllq \$58, $D2, $T0
vpaddq $D1, $D0, $D0
vpaddq $T0, $D0, $D0 # out[0] = (in[0]) ^ (in[1] << shift*1) ^ (in[2] << shift*2);
vpsrlq \$6, $D2, $D2
vpsllq \$23, $D3, $D3
vpsllq \$52, $D4, $T1
vpaddq $D2, $D3, $D3
vpaddq $D3, $T1, $D1 # out[1] = (in[2] >> (64*1-shift*2)) ^ (in[3] << shift*3%64) ^ (in[4] << shift*4%64);
vpsrlq \$12, $D4, $D4
vpsllq \$17, $D5, $D5
vpsllq \$46, $D6, $T2
vpaddq $D4, $D5, $D5
vpaddq $D5, $T2, $D2 # out[2] = (in[4] >> (64*2-shift*4)) ^ (in[5] << shift*5%64) ^ (in[6] << shift*6%64);
vpsrlq \$18, $D6, $D6
vpsllq \$11, $D7, $D7
vpsllq \$40, $D8, $T3
vpaddq $D6, $D7, $D7
vpaddq $D7, $T3, $D3 # out[3] = (in[6] >> (64*3-shift*6)) ^ (in[7] << shift*7%64) ^ (in[8] << shift*8%64);
vpunpcklqdq $D1, $D0, $T0 # T0 = [B2 A2 B0 A0]
vpunpcklqdq $D3, $D2, $T1 # T1 = [D2 C2 D0 C0]
vpunpckhqdq $D1, $D0, $T2 # T2 = [B3 A3 B1 A1]
vpunpckhqdq $D3, $D2, $T3 # T3 = [D3 C3 D1 C1]
vperm2i128 \$0x20, $T1, $T0, $D0 # X0 = [D0 C0 B0 A0]
vperm2i128 \$0x20, $T3, $T2, $D1 # X1 = [D1 C1 B1 A1]
vperm2i128 \$0x31, $T1, $T0, $D2 # X2 = [D2 C2 B2 A2]
vperm2i128 \$0x31, $T3, $T2, $D3 # X3 = [D3 C3 B3 A3]
vmovdqa $D0, 32*0(%rdi)
vmovdqa $D1, 32*3(%rdi)
vmovdqa $D2, 32*6(%rdi)
vmovdqa $D3, 32*9(%rdi)
lea 32*9(%rsi), %rsi
lea 32*1(%rdi), %rdi
dec %ecx
jnz .Lconv_loop
vzeroupper
___
$code.=<<___ if ($win64);
movaps 16*0(%rsp), %xmm6
movaps 16*1(%rsp), %xmm7
movaps 16*2(%rsp), %xmm8
movaps 16*3(%rsp), %xmm9
movaps 16*4(%rsp), %xmm10
movaps 16*5(%rsp), %xmm11
movaps 16*6(%rsp), %xmm12
movaps 16*7(%rsp), %xmm13
movaps 16*8(%rsp), %xmm14
movaps 16*9(%rsp), %xmm15
lea 8+16*10(%rsp), %rsp
___
$code.=<<___;
ret
.size ecp_nistz256_avx2_convert_transpose_back,.-ecp_nistz256_avx2_convert_transpose_back
___
}
{
my ($r_ptr,$a_ptr,$b_ptr,$itr)=("%rdi","%rsi","%rdx","%ecx");
my ($ACC0,$ACC1,$ACC2,$ACC3,$ACC4,$ACC5,$ACC6,$ACC7,$ACC8)=map("%ymm$_",(0..8));
my ($B,$Y,$T0,$AND_MASK,$OVERFLOW)=map("%ymm$_",(9..13));
sub NORMALIZE {
my $ret=<<___;
vpsrlq $digit_size, $ACC0, $T0
vpand $AND_MASK, $ACC0, $ACC0
vpaddq $T0, $ACC1, $ACC1
vpsrlq $digit_size, $ACC1, $T0
vpand $AND_MASK, $ACC1, $ACC1
vpaddq $T0, $ACC2, $ACC2
vpsrlq $digit_size, $ACC2, $T0
vpand $AND_MASK, $ACC2, $ACC2
vpaddq $T0, $ACC3, $ACC3
vpsrlq $digit_size, $ACC3, $T0
vpand $AND_MASK, $ACC3, $ACC3
vpaddq $T0, $ACC4, $ACC4
vpsrlq $digit_size, $ACC4, $T0
vpand $AND_MASK, $ACC4, $ACC4
vpaddq $T0, $ACC5, $ACC5
vpsrlq $digit_size, $ACC5, $T0
vpand $AND_MASK, $ACC5, $ACC5
vpaddq $T0, $ACC6, $ACC6
vpsrlq $digit_size, $ACC6, $T0
vpand $AND_MASK, $ACC6, $ACC6
vpaddq $T0, $ACC7, $ACC7
vpsrlq $digit_size, $ACC7, $T0
vpand $AND_MASK, $ACC7, $ACC7
vpaddq $T0, $ACC8, $ACC8
#vpand $AND_MASK, $ACC8, $ACC8
___
$ret;
}
sub STORE {
my $ret=<<___;
vmovdqa $ACC0, 32*0(%rdi)
lea 160(%rdi), %rax # size optimization
vmovdqa $ACC1, 32*1(%rdi)
vmovdqa $ACC2, 32*2(%rdi)
vmovdqa $ACC3, 32*3(%rdi)
vmovdqa $ACC4, 32*4-160(%rax)
vmovdqa $ACC5, 32*5-160(%rax)
vmovdqa $ACC6, 32*6-160(%rax)
vmovdqa $ACC7, 32*7-160(%rax)
vmovdqa $ACC8, 32*8-160(%rax)
___
$ret;
}
$code.=<<___;
.type avx2_normalize,\@abi-omnipotent
.align 32
avx2_normalize:
vpsrlq $digit_size, $ACC0, $T0
vpand $AND_MASK, $ACC0, $ACC0
vpaddq $T0, $ACC1, $ACC1
vpsrlq $digit_size, $ACC1, $T0
vpand $AND_MASK, $ACC1, $ACC1
vpaddq $T0, $ACC2, $ACC2
vpsrlq $digit_size, $ACC2, $T0
vpand $AND_MASK, $ACC2, $ACC2
vpaddq $T0, $ACC3, $ACC3
vpsrlq $digit_size, $ACC3, $T0
vpand $AND_MASK, $ACC3, $ACC3
vpaddq $T0, $ACC4, $ACC4
vpsrlq $digit_size, $ACC4, $T0
vpand $AND_MASK, $ACC4, $ACC4
vpaddq $T0, $ACC5, $ACC5
vpsrlq $digit_size, $ACC5, $T0
vpand $AND_MASK, $ACC5, $ACC5
vpaddq $T0, $ACC6, $ACC6
vpsrlq $digit_size, $ACC6, $T0
vpand $AND_MASK, $ACC6, $ACC6
vpaddq $T0, $ACC7, $ACC7
vpsrlq $digit_size, $ACC7, $T0
vpand $AND_MASK, $ACC7, $ACC7
vpaddq $T0, $ACC8, $ACC8
#vpand $AND_MASK, $ACC8, $ACC8
ret
.size avx2_normalize,.-avx2_normalize
.type avx2_normalize_n_store,\@abi-omnipotent
.align 32
avx2_normalize_n_store:
vpsrlq $digit_size, $ACC0, $T0
vpand $AND_MASK, $ACC0, $ACC0
vpaddq $T0, $ACC1, $ACC1
vpsrlq $digit_size, $ACC1, $T0
vpand $AND_MASK, $ACC1, $ACC1
vmovdqa $ACC0, 32*0(%rdi)
lea 160(%rdi), %rax # size optimization
vpaddq $T0, $ACC2, $ACC2
vpsrlq $digit_size, $ACC2, $T0
vpand $AND_MASK, $ACC2, $ACC2
vmovdqa $ACC1, 32*1(%rdi)
vpaddq $T0, $ACC3, $ACC3
vpsrlq $digit_size, $ACC3, $T0
vpand $AND_MASK, $ACC3, $ACC3
vmovdqa $ACC2, 32*2(%rdi)
vpaddq $T0, $ACC4, $ACC4
vpsrlq $digit_size, $ACC4, $T0
vpand $AND_MASK, $ACC4, $ACC4
vmovdqa $ACC3, 32*3(%rdi)
vpaddq $T0, $ACC5, $ACC5
vpsrlq $digit_size, $ACC5, $T0
vpand $AND_MASK, $ACC5, $ACC5
vmovdqa $ACC4, 32*4-160(%rax)
vpaddq $T0, $ACC6, $ACC6
vpsrlq $digit_size, $ACC6, $T0
vpand $AND_MASK, $ACC6, $ACC6
vmovdqa $ACC5, 32*5-160(%rax)
vpaddq $T0, $ACC7, $ACC7
vpsrlq $digit_size, $ACC7, $T0
vpand $AND_MASK, $ACC7, $ACC7
vmovdqa $ACC6, 32*6-160(%rax)
vpaddq $T0, $ACC8, $ACC8
#vpand $AND_MASK, $ACC8, $ACC8
vmovdqa $ACC7, 32*7-160(%rax)
vmovdqa $ACC8, 32*8-160(%rax)
ret
.size avx2_normalize_n_store,.-avx2_normalize_n_store
################################################################################
# void avx2_mul_x4(void* RESULTx4, void *Ax4, void *Bx4);
.type avx2_mul_x4,\@abi-omnipotent
.align 32
avx2_mul_x4:
lea .LAVX2_POLY(%rip), %rax
vpxor $ACC0, $ACC0, $ACC0
vpxor $ACC1, $ACC1, $ACC1
vpxor $ACC2, $ACC2, $ACC2
vpxor $ACC3, $ACC3, $ACC3
vpxor $ACC4, $ACC4, $ACC4
vpxor $ACC5, $ACC5, $ACC5
vpxor $ACC6, $ACC6, $ACC6
vpxor $ACC7, $ACC7, $ACC7
vmovdqa 32*7(%rax), %ymm14
vmovdqa 32*8(%rax), %ymm15
mov $n_digits, $itr
lea -512($a_ptr), $a_ptr # strategic bias to control u-op density
jmp .Lavx2_mul_x4_loop
.align 32
.Lavx2_mul_x4_loop:
vmovdqa 32*0($b_ptr), $B
lea 32*1($b_ptr), $b_ptr
vpmuludq 32*0+512($a_ptr), $B, $T0
vpmuludq 32*1+512($a_ptr), $B, $OVERFLOW # borrow $OVERFLOW
vpaddq $T0, $ACC0, $ACC0
vpmuludq 32*2+512($a_ptr), $B, $T0
vpaddq $OVERFLOW, $ACC1, $ACC1
vpand $AND_MASK, $ACC0, $Y
vpmuludq 32*3+512($a_ptr), $B, $OVERFLOW
vpaddq $T0, $ACC2, $ACC2
vpmuludq 32*4+512($a_ptr), $B, $T0
vpaddq $OVERFLOW, $ACC3, $ACC3
vpmuludq 32*5+512($a_ptr), $B, $OVERFLOW
vpaddq $T0, $ACC4, $ACC4
vpmuludq 32*6+512($a_ptr), $B, $T0
vpaddq $OVERFLOW, $ACC5, $ACC5
vpmuludq 32*7+512($a_ptr), $B, $OVERFLOW
vpaddq $T0, $ACC6, $ACC6
# Skip some multiplications, optimizing for the constant poly
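	# (digits 0-2 of the poly equal the all-ones value in $AND_MASK,
	# digits 4 and 5 are zero, and digit 6 is 2^18, so those products
	# collapse into the mask multiply, plain moves and a shift below)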
vpmuludq $AND_MASK, $Y, $T0
vpaddq $OVERFLOW, $ACC7, $ACC7
vpmuludq 32*8+512($a_ptr), $B, $ACC8
vpaddq $T0, $ACC0, $OVERFLOW
vpaddq $T0, $ACC1, $ACC0
vpsrlq $digit_size, $OVERFLOW, $OVERFLOW
vpaddq $T0, $ACC2, $ACC1
vpmuludq 32*3(%rax), $Y, $T0
vpaddq $OVERFLOW, $ACC0, $ACC0
vpaddq $T0, $ACC3, $ACC2
.byte 0x67
vmovdqa $ACC4, $ACC3
vpsllq \$18, $Y, $OVERFLOW
.byte 0x67
vmovdqa $ACC5, $ACC4
vpmuludq %ymm14, $Y, $T0
vpaddq $OVERFLOW, $ACC6, $ACC5
vpmuludq %ymm15, $Y, $OVERFLOW
vpaddq $T0, $ACC7, $ACC6
vpaddq $OVERFLOW, $ACC8, $ACC7
dec $itr
jnz .Lavx2_mul_x4_loop
vpxor $ACC8, $ACC8, $ACC8
ret
.size avx2_mul_x4,.-avx2_mul_x4
# Function optimized for the constant 1
################################################################################
# void avx2_mul_by1_x4(void* RESULTx4, void *Ax4);
.type avx2_mul_by1_x4,\@abi-omnipotent
.align 32
avx2_mul_by1_x4:
lea .LAVX2_POLY(%rip), %rax
vpxor $ACC0, $ACC0, $ACC0
vpxor $ACC1, $ACC1, $ACC1
vpxor $ACC2, $ACC2, $ACC2
vpxor $ACC3, $ACC3, $ACC3
vpxor $ACC4, $ACC4, $ACC4
vpxor $ACC5, $ACC5, $ACC5
vpxor $ACC6, $ACC6, $ACC6
vpxor $ACC7, $ACC7, $ACC7
vpxor $ACC8, $ACC8, $ACC8
vmovdqa 32*3+.LONE(%rip), %ymm14
vmovdqa 32*7+.LONE(%rip), %ymm15
mov $n_digits, $itr
jmp .Lavx2_mul_by1_x4_loop
.align 32
.Lavx2_mul_by1_x4_loop:
vmovdqa 32*0($a_ptr), $B
.byte 0x48,0x8d,0xb6,0x20,0,0,0 # lea 32*1($a_ptr), $a_ptr
vpsllq \$5, $B, $OVERFLOW
vpmuludq %ymm14, $B, $T0
vpaddq $OVERFLOW, $ACC0, $ACC0
vpaddq $T0, $ACC3, $ACC3
.byte 0x67
vpmuludq $AND_MASK, $B, $T0
vpand $AND_MASK, $ACC0, $Y
vpaddq $T0, $ACC4, $ACC4
vpaddq $T0, $ACC5, $ACC5
vpaddq $T0, $ACC6, $ACC6
vpsllq \$23, $B, $T0
.byte 0x67,0x67
vpmuludq %ymm15, $B, $OVERFLOW
vpsubq $T0, $ACC6, $ACC6
vpmuludq $AND_MASK, $Y, $T0
vpaddq $OVERFLOW, $ACC7, $ACC7
vpaddq $T0, $ACC0, $OVERFLOW
vpaddq $T0, $ACC1, $ACC0
.byte 0x67,0x67
vpsrlq $digit_size, $OVERFLOW, $OVERFLOW
vpaddq $T0, $ACC2, $ACC1
vpmuludq 32*3(%rax), $Y, $T0
vpaddq $OVERFLOW, $ACC0, $ACC0
vpaddq $T0, $ACC3, $ACC2
vmovdqa $ACC4, $ACC3
vpsllq \$18, $Y, $OVERFLOW
vmovdqa $ACC5, $ACC4
vpmuludq 32*7(%rax), $Y, $T0
vpaddq $OVERFLOW, $ACC6, $ACC5
vpaddq $T0, $ACC7, $ACC6
vpmuludq 32*8(%rax), $Y, $ACC7
dec $itr
jnz .Lavx2_mul_by1_x4_loop
ret
.size avx2_mul_by1_x4,.-avx2_mul_by1_x4
################################################################################
# void avx2_sqr_x4(void* RESULTx4, void *Ax4, void *Bx4);
.type avx2_sqr_x4,\@abi-omnipotent
.align 32
avx2_sqr_x4:
lea .LAVX2_POLY(%rip), %rax
vmovdqa 32*7(%rax), %ymm14
vmovdqa 32*8(%rax), %ymm15
vmovdqa 32*0($a_ptr), $B
vmovdqa 32*1($a_ptr), $ACC1
vmovdqa 32*2($a_ptr), $ACC2
vmovdqa 32*3($a_ptr), $ACC3
vmovdqa 32*4($a_ptr), $ACC4
vmovdqa 32*5($a_ptr), $ACC5
vmovdqa 32*6($a_ptr), $ACC6
vmovdqa 32*7($a_ptr), $ACC7
vpaddq $ACC1, $ACC1, $ACC1 # 2*$ACC0..7
vmovdqa 32*8($a_ptr), $ACC8
vpaddq $ACC2, $ACC2, $ACC2
vmovdqa $ACC1, 32*0(%rcx)
vpaddq $ACC3, $ACC3, $ACC3
vmovdqa $ACC2, 32*1(%rcx)
vpaddq $ACC4, $ACC4, $ACC4
vmovdqa $ACC3, 32*2(%rcx)
vpaddq $ACC5, $ACC5, $ACC5
vmovdqa $ACC4, 32*3(%rcx)
vpaddq $ACC6, $ACC6, $ACC6
vmovdqa $ACC5, 32*4(%rcx)
vpaddq $ACC7, $ACC7, $ACC7
vmovdqa $ACC6, 32*5(%rcx)
vpaddq $ACC8, $ACC8, $ACC8
vmovdqa $ACC7, 32*6(%rcx)
vmovdqa $ACC8, 32*7(%rcx)
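	# The doubled copies staged in (%rcx) implement the usual squaring
	# shortcut: each cross product a[i]*a[j] (i<j) is taken once against
	# the pre-doubled operand instead of twice.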
#itr 1
vpmuludq $B, $B, $ACC0
vpmuludq $B, $ACC1, $ACC1
vpand $AND_MASK, $ACC0, $Y
vpmuludq $B, $ACC2, $ACC2
vpmuludq $B, $ACC3, $ACC3
vpmuludq $B, $ACC4, $ACC4
vpmuludq $B, $ACC5, $ACC5
vpmuludq $B, $ACC6, $ACC6
vpmuludq $AND_MASK, $Y, $T0
vpmuludq $B, $ACC7, $ACC7
vpmuludq $B, $ACC8, $ACC8
vmovdqa 32*1($a_ptr), $B
vpaddq $T0, $ACC0, $OVERFLOW
vpaddq $T0, $ACC1, $ACC0
vpsrlq $digit_size, $OVERFLOW, $OVERFLOW
vpaddq $T0, $ACC2, $ACC1
vpmuludq 32*3(%rax), $Y, $T0
vpaddq $OVERFLOW, $ACC0, $ACC0
vpaddq $T0, $ACC3, $ACC2
vmovdqa $ACC4, $ACC3
vpsllq \$18, $Y, $T0
vmovdqa $ACC5, $ACC4
vpmuludq %ymm14, $Y, $OVERFLOW
vpaddq $T0, $ACC6, $ACC5
vpmuludq %ymm15, $Y, $T0
vpaddq $OVERFLOW, $ACC7, $ACC6
vpaddq $T0, $ACC8, $ACC7
#itr 2
vpmuludq $B, $B, $OVERFLOW
vpand $AND_MASK, $ACC0, $Y
vpmuludq 32*1(%rcx), $B, $T0
vpaddq $OVERFLOW, $ACC1, $ACC1
vpmuludq 32*2(%rcx), $B, $OVERFLOW
vpaddq $T0, $ACC2, $ACC2
vpmuludq 32*3(%rcx), $B, $T0
vpaddq $OVERFLOW, $ACC3, $ACC3
vpmuludq 32*4(%rcx), $B, $OVERFLOW
vpaddq $T0, $ACC4, $ACC4
vpmuludq 32*5(%rcx), $B, $T0
vpaddq $OVERFLOW, $ACC5, $ACC5
vpmuludq 32*6(%rcx), $B, $OVERFLOW
vpaddq $T0, $ACC6, $ACC6
vpmuludq $AND_MASK, $Y, $T0
vpaddq $OVERFLOW, $ACC7, $ACC7
vpmuludq 32*7(%rcx), $B, $ACC8
vmovdqa 32*2($a_ptr), $B
vpaddq $T0, $ACC0, $OVERFLOW
vpaddq $T0, $ACC1, $ACC0
vpsrlq $digit_size, $OVERFLOW, $OVERFLOW
vpaddq $T0, $ACC2, $ACC1
vpmuludq 32*3(%rax), $Y, $T0
vpaddq $OVERFLOW, $ACC0, $ACC0
vpaddq $T0, $ACC3, $ACC2
vmovdqa $ACC4, $ACC3
vpsllq \$18, $Y, $T0
vmovdqa $ACC5, $ACC4
vpmuludq %ymm14, $Y, $OVERFLOW
vpaddq $T0, $ACC6, $ACC5
vpmuludq %ymm15, $Y, $T0
vpaddq $OVERFLOW, $ACC7, $ACC6
vpaddq $T0, $ACC8, $ACC7
#itr 3
vpmuludq $B, $B, $T0
vpand $AND_MASK, $ACC0, $Y
vpmuludq 32*2(%rcx), $B, $OVERFLOW
vpaddq $T0, $ACC2, $ACC2
vpmuludq 32*3(%rcx), $B, $T0
vpaddq $OVERFLOW, $ACC3, $ACC3
vpmuludq 32*4(%rcx), $B, $OVERFLOW
vpaddq $T0, $ACC4, $ACC4
vpmuludq 32*5(%rcx), $B, $T0
vpaddq $OVERFLOW, $ACC5, $ACC5
vpmuludq 32*6(%rcx), $B, $OVERFLOW
vpaddq $T0, $ACC6, $ACC6
vpmuludq $AND_MASK, $Y, $T0
vpaddq $OVERFLOW, $ACC7, $ACC7
vpmuludq 32*7(%rcx), $B, $ACC8
vmovdqa 32*3($a_ptr), $B
vpaddq $T0, $ACC0, $OVERFLOW
vpaddq $T0, $ACC1, $ACC0
vpsrlq $digit_size, $OVERFLOW, $OVERFLOW
vpaddq $T0, $ACC2, $ACC1
vpmuludq 32*3(%rax), $Y, $T0
vpaddq $OVERFLOW, $ACC0, $ACC0
vpaddq $T0, $ACC3, $ACC2
vmovdqa $ACC4, $ACC3
vpsllq \$18, $Y, $T0
vmovdqa $ACC5, $ACC4
vpmuludq %ymm14, $Y, $OVERFLOW
vpaddq $T0, $ACC6, $ACC5
vpmuludq %ymm15, $Y, $T0
vpand $AND_MASK, $ACC0, $Y
vpaddq $OVERFLOW, $ACC7, $ACC6
vpaddq $T0, $ACC8, $ACC7
#itr 4
vpmuludq $B, $B, $OVERFLOW
vpmuludq 32*3(%rcx), $B, $T0
vpaddq $OVERFLOW, $ACC3, $ACC3
vpmuludq 32*4(%rcx), $B, $OVERFLOW
vpaddq $T0, $ACC4, $ACC4
vpmuludq 32*5(%rcx), $B, $T0
vpaddq $OVERFLOW, $ACC5, $ACC5
vpmuludq 32*6(%rcx), $B, $OVERFLOW
vpaddq $T0, $ACC6, $ACC6
vpmuludq $AND_MASK, $Y, $T0
vpaddq $OVERFLOW, $ACC7, $ACC7
vpmuludq 32*7(%rcx), $B, $ACC8
vmovdqa 32*4($a_ptr), $B
vpaddq $T0, $ACC0, $OVERFLOW
vpaddq $T0, $ACC1, $ACC0
vpsrlq $digit_size, $OVERFLOW, $OVERFLOW
vpaddq $T0, $ACC2, $ACC1
vpmuludq 32*3(%rax), $Y, $T0
vpaddq $OVERFLOW, $ACC0, $ACC0
vpaddq $T0, $ACC3, $ACC2
vmovdqa $ACC4, $ACC3
vpsllq \$18, $Y, $T0
vmovdqa $ACC5, $ACC4
vpmuludq %ymm14, $Y, $OVERFLOW
vpaddq $T0, $ACC6, $ACC5
vpmuludq %ymm15, $Y, $T0
vpand $AND_MASK, $ACC0, $Y
vpaddq $OVERFLOW, $ACC7, $ACC6
vpaddq $T0, $ACC8, $ACC7
#itr 5
vpmuludq $B, $B, $T0
vpmuludq 32*4(%rcx), $B, $OVERFLOW
vpaddq $T0, $ACC4, $ACC4
vpmuludq 32*5(%rcx), $B, $T0
vpaddq $OVERFLOW, $ACC5, $ACC5
vpmuludq 32*6(%rcx), $B, $OVERFLOW
vpaddq $T0, $ACC6, $ACC6
vpmuludq $AND_MASK, $Y, $T0
vpaddq $OVERFLOW, $ACC7, $ACC7
vpmuludq 32*7(%rcx), $B, $ACC8
vmovdqa 32*5($a_ptr), $B
vpaddq $T0, $ACC0, $OVERFLOW
vpsrlq $digit_size, $OVERFLOW, $OVERFLOW
vpaddq $T0, $ACC1, $ACC0
vpaddq $T0, $ACC2, $ACC1
vpmuludq 32*3+.LAVX2_POLY(%rip), $Y, $T0
vpaddq $OVERFLOW, $ACC0, $ACC0
vpaddq $T0, $ACC3, $ACC2
vmovdqa $ACC4, $ACC3
vpsllq \$18, $Y, $T0
vmovdqa $ACC5, $ACC4
vpmuludq %ymm14, $Y, $OVERFLOW
vpaddq $T0, $ACC6, $ACC5
vpmuludq %ymm15, $Y, $T0
vpand $AND_MASK, $ACC0, $Y
vpaddq $OVERFLOW, $ACC7, $ACC6
vpaddq $T0, $ACC8, $ACC7
#itr 6
vpmuludq $B, $B, $OVERFLOW
vpmuludq 32*5(%rcx), $B, $T0
vpaddq $OVERFLOW, $ACC5, $ACC5
vpmuludq 32*6(%rcx), $B, $OVERFLOW
vpaddq $T0, $ACC6, $ACC6
vpmuludq $AND_MASK, $Y, $T0
vpaddq $OVERFLOW, $ACC7, $ACC7
vpmuludq 32*7(%rcx), $B, $ACC8
vmovdqa 32*6($a_ptr), $B
vpaddq $T0, $ACC0, $OVERFLOW
vpaddq $T0, $ACC1, $ACC0
vpsrlq $digit_size, $OVERFLOW, $OVERFLOW
vpaddq $T0, $ACC2, $ACC1
vpmuludq 32*3(%rax), $Y, $T0
vpaddq $OVERFLOW, $ACC0, $ACC0
vpaddq $T0, $ACC3, $ACC2
vmovdqa $ACC4, $ACC3
vpsllq \$18, $Y, $T0
vmovdqa $ACC5, $ACC4
vpmuludq %ymm14, $Y, $OVERFLOW
vpaddq $T0, $ACC6, $ACC5
vpmuludq %ymm15, $Y, $T0
vpand $AND_MASK, $ACC0, $Y
vpaddq $OVERFLOW, $ACC7, $ACC6
vpaddq $T0, $ACC8, $ACC7
#itr 7
vpmuludq $B, $B, $T0
vpmuludq 32*6(%rcx), $B, $OVERFLOW
vpaddq $T0, $ACC6, $ACC6
vpmuludq $AND_MASK, $Y, $T0
vpaddq $OVERFLOW, $ACC7, $ACC7
vpmuludq 32*7(%rcx), $B, $ACC8
vmovdqa 32*7($a_ptr), $B
vpaddq $T0, $ACC0, $OVERFLOW
vpsrlq $digit_size, $OVERFLOW, $OVERFLOW
vpaddq $T0, $ACC1, $ACC0
vpaddq $T0, $ACC2, $ACC1
vpmuludq 32*3(%rax), $Y, $T0
vpaddq $OVERFLOW, $ACC0, $ACC0
vpaddq $T0, $ACC3, $ACC2
vmovdqa $ACC4, $ACC3
vpsllq \$18, $Y, $T0
vmovdqa $ACC5, $ACC4
vpmuludq %ymm14, $Y, $OVERFLOW
vpaddq $T0, $ACC6, $ACC5
vpmuludq %ymm15, $Y, $T0
vpand $AND_MASK, $ACC0, $Y
vpaddq $OVERFLOW, $ACC7, $ACC6
vpaddq $T0, $ACC8, $ACC7
#itr 8
vpmuludq $B, $B, $OVERFLOW
vpmuludq $AND_MASK, $Y, $T0
vpaddq $OVERFLOW, $ACC7, $ACC7
vpmuludq 32*7(%rcx), $B, $ACC8
vmovdqa 32*8($a_ptr), $B
vpaddq $T0, $ACC0, $OVERFLOW
vpsrlq $digit_size, $OVERFLOW, $OVERFLOW
vpaddq $T0, $ACC1, $ACC0
vpaddq $T0, $ACC2, $ACC1
vpmuludq 32*3(%rax), $Y, $T0
vpaddq $OVERFLOW, $ACC0, $ACC0
vpaddq $T0, $ACC3, $ACC2
vmovdqa $ACC4, $ACC3
vpsllq \$18, $Y, $T0
vmovdqa $ACC5, $ACC4
vpmuludq %ymm14, $Y, $OVERFLOW
vpaddq $T0, $ACC6, $ACC5
vpmuludq %ymm15, $Y, $T0
vpand $AND_MASK, $ACC0, $Y
vpaddq $OVERFLOW, $ACC7, $ACC6
vpaddq $T0, $ACC8, $ACC7
#itr 9
vpmuludq $B, $B, $ACC8
vpmuludq $AND_MASK, $Y, $T0
vpaddq $T0, $ACC0, $OVERFLOW
vpsrlq $digit_size, $OVERFLOW, $OVERFLOW
vpaddq $T0, $ACC1, $ACC0
vpaddq $T0, $ACC2, $ACC1
vpmuludq 32*3(%rax), $Y, $T0
vpaddq $OVERFLOW, $ACC0, $ACC0
vpaddq $T0, $ACC3, $ACC2
vmovdqa $ACC4, $ACC3
vpsllq \$18, $Y, $T0
vmovdqa $ACC5, $ACC4
vpmuludq %ymm14, $Y, $OVERFLOW
vpaddq $T0, $ACC6, $ACC5
vpmuludq %ymm15, $Y, $T0
vpaddq $OVERFLOW, $ACC7, $ACC6
vpaddq $T0, $ACC8, $ACC7
vpxor $ACC8, $ACC8, $ACC8
ret
.size avx2_sqr_x4,.-avx2_sqr_x4
################################################################################
# void avx2_sub_x4(void* RESULTx4, void *Ax4, void *Bx4);
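# Computes A - B over the redundant representation; a padded multiple of
# the prime (.LAVX2_POLY_x8) is added to A first so that no digit of the
# result can go negative.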
.type avx2_sub_x4,\@abi-omnipotent
.align 32
avx2_sub_x4:
vmovdqa 32*0($a_ptr), $ACC0
lea 160($a_ptr), $a_ptr
lea .LAVX2_POLY_x8+128(%rip), %rax
lea 128($b_ptr), $b_ptr
vmovdqa 32*1-160($a_ptr), $ACC1
vmovdqa 32*2-160($a_ptr), $ACC2
vmovdqa 32*3-160($a_ptr), $ACC3
vmovdqa 32*4-160($a_ptr), $ACC4
vmovdqa 32*5-160($a_ptr), $ACC5
vmovdqa 32*6-160($a_ptr), $ACC6
vmovdqa 32*7-160($a_ptr), $ACC7
vmovdqa 32*8-160($a_ptr), $ACC8
vpaddq 32*0-128(%rax), $ACC0, $ACC0
vpaddq 32*1-128(%rax), $ACC1, $ACC1
vpaddq 32*2-128(%rax), $ACC2, $ACC2
vpaddq 32*3-128(%rax), $ACC3, $ACC3
vpaddq 32*4-128(%rax), $ACC4, $ACC4
vpaddq 32*5-128(%rax), $ACC5, $ACC5
vpaddq 32*6-128(%rax), $ACC6, $ACC6
vpaddq 32*7-128(%rax), $ACC7, $ACC7
vpaddq 32*8-128(%rax), $ACC8, $ACC8
vpsubq 32*0-128($b_ptr), $ACC0, $ACC0
vpsubq 32*1-128($b_ptr), $ACC1, $ACC1
vpsubq 32*2-128($b_ptr), $ACC2, $ACC2
vpsubq 32*3-128($b_ptr), $ACC3, $ACC3
vpsubq 32*4-128($b_ptr), $ACC4, $ACC4
vpsubq 32*5-128($b_ptr), $ACC5, $ACC5
vpsubq 32*6-128($b_ptr), $ACC6, $ACC6
vpsubq 32*7-128($b_ptr), $ACC7, $ACC7
vpsubq 32*8-128($b_ptr), $ACC8, $ACC8
ret
.size avx2_sub_x4,.-avx2_sub_x4
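################################################################################
# avx2_select_n_store: constant-time selection using the two lane masks
# prepared on the stack (each mask is all-ones where the corresponding input
# point was all-zero, i.e. the point at infinity); the freshly computed
# accumulators are kept when neither mask is set, otherwise the fallback
# values passed in %rsi / %rdx are blended in, and the result is stored.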
.type avx2_select_n_store,\@abi-omnipotent
.align 32
avx2_select_n_store:
vmovdqa `8+32*9*8`(%rsp), $Y
vpor `8+32*9*8+32`(%rsp), $Y, $Y
vpandn $ACC0, $Y, $ACC0
vpandn $ACC1, $Y, $ACC1
vpandn $ACC2, $Y, $ACC2
vpandn $ACC3, $Y, $ACC3
vpandn $ACC4, $Y, $ACC4
vpandn $ACC5, $Y, $ACC5
vpandn $ACC6, $Y, $ACC6
vmovdqa `8+32*9*8+32`(%rsp), $B
vpandn $ACC7, $Y, $ACC7
vpandn `8+32*9*8`(%rsp), $B, $B
vpandn $ACC8, $Y, $ACC8
vpand 32*0(%rsi), $B, $T0
lea 160(%rsi), %rax
vpand 32*1(%rsi), $B, $Y
vpxor $T0, $ACC0, $ACC0
vpand 32*2(%rsi), $B, $T0
vpxor $Y, $ACC1, $ACC1
vpand 32*3(%rsi), $B, $Y
vpxor $T0, $ACC2, $ACC2
vpand 32*4-160(%rax), $B, $T0
vpxor $Y, $ACC3, $ACC3
vpand 32*5-160(%rax), $B, $Y
vpxor $T0, $ACC4, $ACC4
vpand 32*6-160(%rax), $B, $T0
vpxor $Y, $ACC5, $ACC5
vpand 32*7-160(%rax), $B, $Y
vpxor $T0, $ACC6, $ACC6
vpand 32*8-160(%rax), $B, $T0
vmovdqa `8+32*9*8+32`(%rsp), $B
vpxor $Y, $ACC7, $ACC7
vpand 32*0(%rdx), $B, $Y
lea 160(%rdx), %rax
vpxor $T0, $ACC8, $ACC8
vpand 32*1(%rdx), $B, $T0
vpxor $Y, $ACC0, $ACC0
vpand 32*2(%rdx), $B, $Y
vpxor $T0, $ACC1, $ACC1
vpand 32*3(%rdx), $B, $T0
vpxor $Y, $ACC2, $ACC2
vpand 32*4-160(%rax), $B, $Y
vpxor $T0, $ACC3, $ACC3
vpand 32*5-160(%rax), $B, $T0
vpxor $Y, $ACC4, $ACC4
vpand 32*6-160(%rax), $B, $Y
vpxor $T0, $ACC5, $ACC5
vpand 32*7-160(%rax), $B, $T0
vpxor $Y, $ACC6, $ACC6
vpand 32*8-160(%rax), $B, $Y
vpxor $T0, $ACC7, $ACC7
vpxor $Y, $ACC8, $ACC8
`&STORE`
ret
.size avx2_select_n_store,.-avx2_select_n_store
___
$code.=<<___ if (0); # inlined
################################################################################
# void avx2_mul_by2_x4(void* RESULTx4, void *Ax4);
.type avx2_mul_by2_x4,\@abi-omnipotent
.align 32
avx2_mul_by2_x4:
vmovdqa 32*0($a_ptr), $ACC0
lea 160($a_ptr), %rax
vmovdqa 32*1($a_ptr), $ACC1
vmovdqa 32*2($a_ptr), $ACC2
vmovdqa 32*3($a_ptr), $ACC3
vmovdqa 32*4-160(%rax), $ACC4
vmovdqa 32*5-160(%rax), $ACC5
vmovdqa 32*6-160(%rax), $ACC6
vmovdqa 32*7-160(%rax), $ACC7
vmovdqa 32*8-160(%rax), $ACC8
vpaddq $ACC0, $ACC0, $ACC0
vpaddq $ACC1, $ACC1, $ACC1
vpaddq $ACC2, $ACC2, $ACC2
vpaddq $ACC3, $ACC3, $ACC3
vpaddq $ACC4, $ACC4, $ACC4
vpaddq $ACC5, $ACC5, $ACC5
vpaddq $ACC6, $ACC6, $ACC6
vpaddq $ACC7, $ACC7, $ACC7
vpaddq $ACC8, $ACC8, $ACC8
ret
.size avx2_mul_by2_x4,.-avx2_mul_by2_x4
___
my ($r_ptr_in,$a_ptr_in,$b_ptr_in)=("%rdi","%rsi","%rdx");
my ($r_ptr,$a_ptr,$b_ptr)=("%r8","%r9","%r10");
$code.=<<___;
################################################################################
# void ecp_nistz256_avx2_point_add_affine_x4(void* RESULTx4, void *Ax4, void *Bx4);
.globl ecp_nistz256_avx2_point_add_affine_x4
.type ecp_nistz256_avx2_point_add_affine_x4,\@function,3
.align 32
ecp_nistz256_avx2_point_add_affine_x4:
mov %rsp, %rax
push %rbp
vzeroupper
___
$code.=<<___ if ($win64);
lea -16*10(%rsp), %rsp
vmovaps %xmm6, -8-16*10(%rax)
vmovaps %xmm7, -8-16*9(%rax)
vmovaps %xmm8, -8-16*8(%rax)
vmovaps %xmm9, -8-16*7(%rax)
vmovaps %xmm10, -8-16*6(%rax)
vmovaps %xmm11, -8-16*5(%rax)
vmovaps %xmm12, -8-16*4(%rax)
vmovaps %xmm13, -8-16*3(%rax)
vmovaps %xmm14, -8-16*2(%rax)
vmovaps %xmm15, -8-16*1(%rax)
___
$code.=<<___;
lea -8(%rax), %rbp
# Result + 32*0 = Result.X
# Result + 32*9 = Result.Y
# Result + 32*18 = Result.Z
# A + 32*0 = A.X
# A + 32*9 = A.Y
# A + 32*18 = A.Z
# B + 32*0 = B.X
# B + 32*9 = B.Y
sub \$`32*9*8+32*2+32*8`, %rsp
and \$-64, %rsp
mov $r_ptr_in, $r_ptr
mov $a_ptr_in, $a_ptr
mov $b_ptr_in, $b_ptr
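	# Detect the point at infinity (encoded as all-zero coordinates) in
	# each of the four lanes: OR together the 18 limb rows (X and Y) of
	# the input point and compare with zero, giving an all-ones lane mask
	# that is parked on the stack for avx2_select_n_store.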
vmovdqa 32*0($a_ptr_in), %ymm0
vmovdqa .LAVX2_AND_MASK(%rip), $AND_MASK
vpxor %ymm1, %ymm1, %ymm1
lea 256($a_ptr_in), %rax # size optimization
vpor 32*1($a_ptr_in), %ymm0, %ymm0
vpor 32*2($a_ptr_in), %ymm0, %ymm0
vpor 32*3($a_ptr_in), %ymm0, %ymm0
vpor 32*4-256(%rax), %ymm0, %ymm0
lea 256(%rax), %rcx # size optimization
vpor 32*5-256(%rax), %ymm0, %ymm0
vpor 32*6-256(%rax), %ymm0, %ymm0
vpor 32*7-256(%rax), %ymm0, %ymm0
vpor 32*8-256(%rax), %ymm0, %ymm0
vpor 32*9-256(%rax), %ymm0, %ymm0
vpor 32*10-256(%rax), %ymm0, %ymm0
vpor 32*11-256(%rax), %ymm0, %ymm0
vpor 32*12-512(%rcx), %ymm0, %ymm0
vpor 32*13-512(%rcx), %ymm0, %ymm0
vpor 32*14-512(%rcx), %ymm0, %ymm0
vpor 32*15-512(%rcx), %ymm0, %ymm0
vpor 32*16-512(%rcx), %ymm0, %ymm0
vpor 32*17-512(%rcx), %ymm0, %ymm0
vpcmpeqq %ymm1, %ymm0, %ymm0
vmovdqa %ymm0, `32*9*8`(%rsp)
vpxor %ymm1, %ymm1, %ymm1
vmovdqa 32*0($b_ptr), %ymm0
lea 256($b_ptr), %rax # size optimization
vpor 32*1($b_ptr), %ymm0, %ymm0
vpor 32*2($b_ptr), %ymm0, %ymm0
vpor 32*3($b_ptr), %ymm0, %ymm0
vpor 32*4-256(%rax), %ymm0, %ymm0
lea 256(%rax), %rcx # size optimization
vpor 32*5-256(%rax), %ymm0, %ymm0
vpor 32*6-256(%rax), %ymm0, %ymm0
vpor 32*7-256(%rax), %ymm0, %ymm0
vpor 32*8-256(%rax), %ymm0, %ymm0
vpor 32*9-256(%rax), %ymm0, %ymm0
vpor 32*10-256(%rax), %ymm0, %ymm0
vpor 32*11-256(%rax), %ymm0, %ymm0
vpor 32*12-512(%rcx), %ymm0, %ymm0
vpor 32*13-512(%rcx), %ymm0, %ymm0
vpor 32*14-512(%rcx), %ymm0, %ymm0
vpor 32*15-512(%rcx), %ymm0, %ymm0
vpor 32*16-512(%rcx), %ymm0, %ymm0
vpor 32*17-512(%rcx), %ymm0, %ymm0
vpcmpeqq %ymm1, %ymm0, %ymm0
vmovdqa %ymm0, `32*9*8+32`(%rsp)
# Z1^2 = Z1*Z1
lea `32*9*2`($a_ptr), %rsi
lea `32*9*2`(%rsp), %rdi
lea `32*9*8+32*2`(%rsp), %rcx # temporary vector
call avx2_sqr_x4
call avx2_normalize_n_store
# U2 = X2*Z1^2
lea `32*9*0`($b_ptr), %rsi
lea `32*9*2`(%rsp), %rdx
lea `32*9*0`(%rsp), %rdi
call avx2_mul_x4
#call avx2_normalize
`&STORE`
# S2 = Z1*Z1^2 = Z1^3
lea `32*9*2`($a_ptr), %rsi
lea `32*9*2`(%rsp), %rdx
lea `32*9*1`(%rsp), %rdi
call avx2_mul_x4
call avx2_normalize_n_store
# S2 = S2*Y2 = Y2*Z1^3
lea `32*9*1`($b_ptr), %rsi
lea `32*9*1`(%rsp), %rdx
lea `32*9*1`(%rsp), %rdi
call avx2_mul_x4
call avx2_normalize_n_store
# H = U2 - U1 = U2 - X1
lea `32*9*0`(%rsp), %rsi
lea `32*9*0`($a_ptr), %rdx
lea `32*9*3`(%rsp), %rdi
call avx2_sub_x4
call avx2_normalize_n_store
# R = S2 - S1 = S2 - Y1
lea `32*9*1`(%rsp), %rsi
lea `32*9*1`($a_ptr), %rdx
lea `32*9*4`(%rsp), %rdi
call avx2_sub_x4
call avx2_normalize_n_store
# Z3 = H*Z1*Z2
lea `32*9*3`(%rsp), %rsi
lea `32*9*2`($a_ptr), %rdx
lea `32*9*2`($r_ptr), %rdi
call avx2_mul_x4
call avx2_normalize
lea .LONE(%rip), %rsi
lea `32*9*2`($a_ptr), %rdx
call avx2_select_n_store
# R^2 = R^2
lea `32*9*4`(%rsp), %rsi
lea `32*9*6`(%rsp), %rdi
lea `32*9*8+32*2`(%rsp), %rcx # temporary vector
call avx2_sqr_x4
call avx2_normalize_n_store
# H^2 = H^2
lea `32*9*3`(%rsp), %rsi
lea `32*9*5`(%rsp), %rdi
call avx2_sqr_x4
call avx2_normalize_n_store
# H^3 = H^2*H
lea `32*9*3`(%rsp), %rsi
lea `32*9*5`(%rsp), %rdx
lea `32*9*7`(%rsp), %rdi
call avx2_mul_x4
call avx2_normalize_n_store
# U2 = U1*H^2
lea `32*9*0`($a_ptr), %rsi
lea `32*9*5`(%rsp), %rdx
lea `32*9*0`(%rsp), %rdi
call avx2_mul_x4
#call avx2_normalize
`&STORE`
# Hsqr = U2*2
#lea 32*9*0(%rsp), %rsi
#lea 32*9*5(%rsp), %rdi
#call avx2_mul_by2_x4
vpaddq $ACC0, $ACC0, $ACC0 # inlined avx2_mul_by2_x4
lea `32*9*5`(%rsp), %rdi
vpaddq $ACC1, $ACC1, $ACC1
vpaddq $ACC2, $ACC2, $ACC2
vpaddq $ACC3, $ACC3, $ACC3
vpaddq $ACC4, $ACC4, $ACC4
vpaddq $ACC5, $ACC5, $ACC5
vpaddq $ACC6, $ACC6, $ACC6
vpaddq $ACC7, $ACC7, $ACC7
vpaddq $ACC8, $ACC8, $ACC8
call avx2_normalize_n_store
# X3 = R^2 - H^3
#lea 32*9*6(%rsp), %rsi
#lea 32*9*7(%rsp), %rdx
#lea 32*9*5(%rsp), %rcx
#lea 32*9*0($r_ptr), %rdi
#call avx2_sub_x4
#NORMALIZE
#STORE
# X3 = X3 - U2*2
#lea 32*9*0($r_ptr), %rsi
#lea 32*9*0($r_ptr), %rdi
#call avx2_sub_x4
#NORMALIZE
#STORE
lea `32*9*6+128`(%rsp), %rsi
lea .LAVX2_POLY_x2+128(%rip), %rax
lea `32*9*7+128`(%rsp), %rdx
lea `32*9*5+128`(%rsp), %rcx
lea `32*9*0`($r_ptr), %rdi
vmovdqa 32*0-128(%rsi), $ACC0
vmovdqa 32*1-128(%rsi), $ACC1
vmovdqa 32*2-128(%rsi), $ACC2
vmovdqa 32*3-128(%rsi), $ACC3
vmovdqa 32*4-128(%rsi), $ACC4
vmovdqa 32*5-128(%rsi), $ACC5
vmovdqa 32*6-128(%rsi), $ACC6
vmovdqa 32*7-128(%rsi), $ACC7
vmovdqa 32*8-128(%rsi), $ACC8
vpaddq 32*0-128(%rax), $ACC0, $ACC0
vpaddq 32*1-128(%rax), $ACC1, $ACC1
vpaddq 32*2-128(%rax), $ACC2, $ACC2
vpaddq 32*3-128(%rax), $ACC3, $ACC3
vpaddq 32*4-128(%rax), $ACC4, $ACC4
vpaddq 32*5-128(%rax), $ACC5, $ACC5
vpaddq 32*6-128(%rax), $ACC6, $ACC6
vpaddq 32*7-128(%rax), $ACC7, $ACC7
vpaddq 32*8-128(%rax), $ACC8, $ACC8
vpsubq 32*0-128(%rdx), $ACC0, $ACC0
vpsubq 32*1-128(%rdx), $ACC1, $ACC1
vpsubq 32*2-128(%rdx), $ACC2, $ACC2
vpsubq 32*3-128(%rdx), $ACC3, $ACC3
vpsubq 32*4-128(%rdx), $ACC4, $ACC4
vpsubq 32*5-128(%rdx), $ACC5, $ACC5
vpsubq 32*6-128(%rdx), $ACC6, $ACC6
vpsubq 32*7-128(%rdx), $ACC7, $ACC7
vpsubq 32*8-128(%rdx), $ACC8, $ACC8
vpsubq 32*0-128(%rcx), $ACC0, $ACC0
vpsubq 32*1-128(%rcx), $ACC1, $ACC1
vpsubq 32*2-128(%rcx), $ACC2, $ACC2
vpsubq 32*3-128(%rcx), $ACC3, $ACC3
vpsubq 32*4-128(%rcx), $ACC4, $ACC4
vpsubq 32*5-128(%rcx), $ACC5, $ACC5
vpsubq 32*6-128(%rcx), $ACC6, $ACC6
vpsubq 32*7-128(%rcx), $ACC7, $ACC7
vpsubq 32*8-128(%rcx), $ACC8, $ACC8
call avx2_normalize
lea 32*0($b_ptr), %rsi
lea 32*0($a_ptr), %rdx
call avx2_select_n_store
# H = U2 - X3
lea `32*9*0`(%rsp), %rsi
lea `32*9*0`($r_ptr), %rdx
lea `32*9*3`(%rsp), %rdi
call avx2_sub_x4
call avx2_normalize_n_store
# H = H*R
lea `32*9*3`(%rsp), %rsi
lea `32*9*4`(%rsp), %rdx
lea `32*9*3`(%rsp), %rdi
call avx2_mul_x4
call avx2_normalize_n_store
# S2 = S1 * H^3
lea `32*9*7`(%rsp), %rsi
lea `32*9*1`($a_ptr), %rdx
lea `32*9*1`(%rsp), %rdi
call avx2_mul_x4
call avx2_normalize_n_store
# Y3 = H*R - S2
lea `32*9*3`(%rsp), %rsi
lea `32*9*1`(%rsp), %rdx
lea `32*9*1`($r_ptr), %rdi
call avx2_sub_x4
call avx2_normalize
lea 32*9($b_ptr), %rsi
lea 32*9($a_ptr), %rdx
call avx2_select_n_store
#lea 32*9*0($r_ptr), %rsi
#lea 32*9*0($r_ptr), %rdi
#call avx2_mul_by1_x4
#NORMALIZE
#STORE
lea `32*9*1`($r_ptr), %rsi
lea `32*9*1`($r_ptr), %rdi
call avx2_mul_by1_x4
call avx2_normalize_n_store
vzeroupper
___
$code.=<<___ if ($win64);
movaps %xmm6, -16*10(%rbp)
movaps %xmm7, -16*9(%rbp)
movaps %xmm8, -16*8(%rbp)
movaps %xmm9, -16*7(%rbp)
movaps %xmm10, -16*6(%rbp)
movaps %xmm11, -16*5(%rbp)
movaps %xmm12, -16*4(%rbp)
movaps %xmm13, -16*3(%rbp)
movaps %xmm14, -16*2(%rbp)
movaps %xmm15, -16*1(%rbp)
___
$code.=<<___;
mov %rbp, %rsp
pop %rbp
ret
.size ecp_nistz256_avx2_point_add_affine_x4,.-ecp_nistz256_avx2_point_add_affine_x4
################################################################################
# void ecp_nistz256_avx2_point_add_affines_x4(void* RESULTx4, void *Ax4, void *Bx4);
.globl ecp_nistz256_avx2_point_add_affines_x4
.type ecp_nistz256_avx2_point_add_affines_x4,\@function,3
.align 32
ecp_nistz256_avx2_point_add_affines_x4:
mov %rsp, %rax
push %rbp
vzeroupper
___
$code.=<<___ if ($win64);
lea -16*10(%rsp), %rsp
vmovaps %xmm6, -8-16*10(%rax)
vmovaps %xmm7, -8-16*9(%rax)
vmovaps %xmm8, -8-16*8(%rax)
vmovaps %xmm9, -8-16*7(%rax)
vmovaps %xmm10, -8-16*6(%rax)
vmovaps %xmm11, -8-16*5(%rax)
vmovaps %xmm12, -8-16*4(%rax)
vmovaps %xmm13, -8-16*3(%rax)
vmovaps %xmm14, -8-16*2(%rax)
vmovaps %xmm15, -8-16*1(%rax)
___
$code.=<<___;
lea -8(%rax), %rbp
# Result + 32*0 = Result.X
# Result + 32*9 = Result.Y
# Result + 32*18 = Result.Z
# A + 32*0 = A.X
# A + 32*9 = A.Y
# B + 32*0 = B.X
# B + 32*9 = B.Y
sub \$`32*9*8+32*2+32*8`, %rsp
and \$-64, %rsp
mov $r_ptr_in, $r_ptr
mov $a_ptr_in, $a_ptr
mov $b_ptr_in, $b_ptr
vmovdqa 32*0($a_ptr_in), %ymm0
vmovdqa .LAVX2_AND_MASK(%rip), $AND_MASK
vpxor %ymm1, %ymm1, %ymm1
lea 256($a_ptr_in), %rax # size optimization
vpor 32*1($a_ptr_in), %ymm0, %ymm0
vpor 32*2($a_ptr_in), %ymm0, %ymm0
vpor 32*3($a_ptr_in), %ymm0, %ymm0
vpor 32*4-256(%rax), %ymm0, %ymm0
lea 256(%rax), %rcx # size optimization
vpor 32*5-256(%rax), %ymm0, %ymm0
vpor 32*6-256(%rax), %ymm0, %ymm0
vpor 32*7-256(%rax), %ymm0, %ymm0
vpor 32*8-256(%rax), %ymm0, %ymm0
vpor 32*9-256(%rax), %ymm0, %ymm0
vpor 32*10-256(%rax), %ymm0, %ymm0
vpor 32*11-256(%rax), %ymm0, %ymm0
vpor 32*12-512(%rcx), %ymm0, %ymm0
vpor 32*13-512(%rcx), %ymm0, %ymm0
vpor 32*14-512(%rcx), %ymm0, %ymm0
vpor 32*15-512(%rcx), %ymm0, %ymm0
vpor 32*16-512(%rcx), %ymm0, %ymm0
vpor 32*17-512(%rcx), %ymm0, %ymm0
vpcmpeqq %ymm1, %ymm0, %ymm0
vmovdqa %ymm0, `32*9*8`(%rsp)
vpxor %ymm1, %ymm1, %ymm1
vmovdqa 32*0($b_ptr), %ymm0
lea 256($b_ptr), %rax # size optimization
vpor 32*1($b_ptr), %ymm0, %ymm0
vpor 32*2($b_ptr), %ymm0, %ymm0
vpor 32*3($b_ptr), %ymm0, %ymm0
vpor 32*4-256(%rax), %ymm0, %ymm0
lea 256(%rax), %rcx # size optimization
vpor 32*5-256(%rax), %ymm0, %ymm0
vpor 32*6-256(%rax), %ymm0, %ymm0
vpor 32*7-256(%rax), %ymm0, %ymm0
vpor 32*8-256(%rax), %ymm0, %ymm0
vpor 32*9-256(%rax), %ymm0, %ymm0
vpor 32*10-256(%rax), %ymm0, %ymm0
vpor 32*11-256(%rax), %ymm0, %ymm0
vpor 32*12-512(%rcx), %ymm0, %ymm0
vpor 32*13-512(%rcx), %ymm0, %ymm0
vpor 32*14-512(%rcx), %ymm0, %ymm0
vpor 32*15-512(%rcx), %ymm0, %ymm0
vpor 32*16-512(%rcx), %ymm0, %ymm0
vpor 32*17-512(%rcx), %ymm0, %ymm0
vpcmpeqq %ymm1, %ymm0, %ymm0
vmovdqa %ymm0, `32*9*8+32`(%rsp)
# H = U2 - U1 = X2 - X1
lea `32*9*0`($b_ptr), %rsi
lea `32*9*0`($a_ptr), %rdx
lea `32*9*3`(%rsp), %rdi
call avx2_sub_x4
call avx2_normalize_n_store
# R = S2 - S1 = Y2 - Y1
lea `32*9*1`($b_ptr), %rsi
lea `32*9*1`($a_ptr), %rdx
lea `32*9*4`(%rsp), %rdi
call avx2_sub_x4
call avx2_normalize_n_store
# Z3 = H*Z1*Z2 = H
lea `32*9*3`(%rsp), %rsi
lea `32*9*2`($r_ptr), %rdi
call avx2_mul_by1_x4
call avx2_normalize
vmovdqa `32*9*8`(%rsp), $B
vpor `32*9*8+32`(%rsp), $B, $B
vpandn $ACC0, $B, $ACC0
lea .LONE+128(%rip), %rax
vpandn $ACC1, $B, $ACC1
vpandn $ACC2, $B, $ACC2
vpandn $ACC3, $B, $ACC3
vpandn $ACC4, $B, $ACC4
vpandn $ACC5, $B, $ACC5
vpandn $ACC6, $B, $ACC6
vpandn $ACC7, $B, $ACC7
vpand 32*0-128(%rax), $B, $T0
vpandn $ACC8, $B, $ACC8
vpand 32*1-128(%rax), $B, $Y
vpxor $T0, $ACC0, $ACC0
vpand 32*2-128(%rax), $B, $T0
vpxor $Y, $ACC1, $ACC1
vpand 32*3-128(%rax), $B, $Y
vpxor $T0, $ACC2, $ACC2
vpand 32*4-128(%rax), $B, $T0
vpxor $Y, $ACC3, $ACC3
vpand 32*5-128(%rax), $B, $Y
vpxor $T0, $ACC4, $ACC4
vpand 32*6-128(%rax), $B, $T0
vpxor $Y, $ACC5, $ACC5
vpand 32*7-128(%rax), $B, $Y
vpxor $T0, $ACC6, $ACC6
vpand 32*8-128(%rax), $B, $T0
vpxor $Y, $ACC7, $ACC7
vpxor $T0, $ACC8, $ACC8
`&STORE`
# R^2 = R^2
lea `32*9*4`(%rsp), %rsi
lea `32*9*6`(%rsp), %rdi
lea `32*9*8+32*2`(%rsp), %rcx # temporary vector
call avx2_sqr_x4
call avx2_normalize_n_store
# H^2 = H^2
lea `32*9*3`(%rsp), %rsi
lea `32*9*5`(%rsp), %rdi
call avx2_sqr_x4
call avx2_normalize_n_store
# H^3 = H^2*H
lea `32*9*3`(%rsp), %rsi
lea `32*9*5`(%rsp), %rdx
lea `32*9*7`(%rsp), %rdi
call avx2_mul_x4
call avx2_normalize_n_store
# U2 = U1*H^2
lea `32*9*0`($a_ptr), %rsi
lea `32*9*5`(%rsp), %rdx
lea `32*9*0`(%rsp), %rdi
call avx2_mul_x4
#call avx2_normalize
`&STORE`
# Hsqr = U2*2
#lea 32*9*0(%rsp), %rsi
#lea 32*9*5(%rsp), %rdi
#call avx2_mul_by2_x4
vpaddq $ACC0, $ACC0, $ACC0 # inlined avx2_mul_by2_x4
lea `32*9*5`(%rsp), %rdi
vpaddq $ACC1, $ACC1, $ACC1
vpaddq $ACC2, $ACC2, $ACC2
vpaddq $ACC3, $ACC3, $ACC3
vpaddq $ACC4, $ACC4, $ACC4
vpaddq $ACC5, $ACC5, $ACC5
vpaddq $ACC6, $ACC6, $ACC6
vpaddq $ACC7, $ACC7, $ACC7
vpaddq $ACC8, $ACC8, $ACC8
call avx2_normalize_n_store
# X3 = R^2 - H^3
#lea 32*9*6(%rsp), %rsi
#lea 32*9*7(%rsp), %rdx
#lea 32*9*5(%rsp), %rcx
#lea 32*9*0($r_ptr), %rdi
#call avx2_sub_x4
#NORMALIZE
#STORE
# X3 = X3 - U2*2
#lea 32*9*0($r_ptr), %rsi
#lea 32*9*0($r_ptr), %rdi
#call avx2_sub_x4
#NORMALIZE
#STORE
lea `32*9*6+128`(%rsp), %rsi
lea .LAVX2_POLY_x2+128(%rip), %rax
lea `32*9*7+128`(%rsp), %rdx
lea `32*9*5+128`(%rsp), %rcx
lea `32*9*0`($r_ptr), %rdi
vmovdqa 32*0-128(%rsi), $ACC0
vmovdqa 32*1-128(%rsi), $ACC1
vmovdqa 32*2-128(%rsi), $ACC2
vmovdqa 32*3-128(%rsi), $ACC3
vmovdqa 32*4-128(%rsi), $ACC4
vmovdqa 32*5-128(%rsi), $ACC5
vmovdqa 32*6-128(%rsi), $ACC6
vmovdqa 32*7-128(%rsi), $ACC7
vmovdqa 32*8-128(%rsi), $ACC8
vpaddq 32*0-128(%rax), $ACC0, $ACC0
vpaddq 32*1-128(%rax), $ACC1, $ACC1
vpaddq 32*2-128(%rax), $ACC2, $ACC2
vpaddq 32*3-128(%rax), $ACC3, $ACC3
vpaddq 32*4-128(%rax), $ACC4, $ACC4
vpaddq 32*5-128(%rax), $ACC5, $ACC5
vpaddq 32*6-128(%rax), $ACC6, $ACC6
vpaddq 32*7-128(%rax), $ACC7, $ACC7
vpaddq 32*8-128(%rax), $ACC8, $ACC8
vpsubq 32*0-128(%rdx), $ACC0, $ACC0
vpsubq 32*1-128(%rdx), $ACC1, $ACC1
vpsubq 32*2-128(%rdx), $ACC2, $ACC2
vpsubq 32*3-128(%rdx), $ACC3, $ACC3
vpsubq 32*4-128(%rdx), $ACC4, $ACC4
vpsubq 32*5-128(%rdx), $ACC5, $ACC5
vpsubq 32*6-128(%rdx), $ACC6, $ACC6
vpsubq 32*7-128(%rdx), $ACC7, $ACC7
vpsubq 32*8-128(%rdx), $ACC8, $ACC8
vpsubq 32*0-128(%rcx), $ACC0, $ACC0
vpsubq 32*1-128(%rcx), $ACC1, $ACC1
vpsubq 32*2-128(%rcx), $ACC2, $ACC2
vpsubq 32*3-128(%rcx), $ACC3, $ACC3
vpsubq 32*4-128(%rcx), $ACC4, $ACC4
vpsubq 32*5-128(%rcx), $ACC5, $ACC5
vpsubq 32*6-128(%rcx), $ACC6, $ACC6
vpsubq 32*7-128(%rcx), $ACC7, $ACC7
vpsubq 32*8-128(%rcx), $ACC8, $ACC8
call avx2_normalize
lea 32*0($b_ptr), %rsi
lea 32*0($a_ptr), %rdx
call avx2_select_n_store
# H = U2 - X3
lea `32*9*0`(%rsp), %rsi
lea `32*9*0`($r_ptr), %rdx
lea `32*9*3`(%rsp), %rdi
call avx2_sub_x4
call avx2_normalize_n_store
# H = H*R
lea `32*9*3`(%rsp), %rsi
lea `32*9*4`(%rsp), %rdx
lea `32*9*3`(%rsp), %rdi
call avx2_mul_x4
call avx2_normalize_n_store
# S2 = S1 * H^3
lea `32*9*7`(%rsp), %rsi
lea `32*9*1`($a_ptr), %rdx
lea `32*9*1`(%rsp), %rdi
call avx2_mul_x4
call avx2_normalize_n_store
# Y3 = H*R - S2
lea `32*9*3`(%rsp), %rsi
lea `32*9*1`(%rsp), %rdx
lea `32*9*1`($r_ptr), %rdi
call avx2_sub_x4
call avx2_normalize
lea 32*9($b_ptr), %rsi
lea 32*9($a_ptr), %rdx
call avx2_select_n_store
#lea 32*9*0($r_ptr), %rsi
#lea 32*9*0($r_ptr), %rdi
#call avx2_mul_by1_x4
#NORMALIZE
#STORE
lea `32*9*1`($r_ptr), %rsi
lea `32*9*1`($r_ptr), %rdi
call avx2_mul_by1_x4
call avx2_normalize_n_store
vzeroupper
___
$code.=<<___ if ($win64);
movaps %xmm6, -16*10(%rbp)
movaps %xmm7, -16*9(%rbp)
movaps %xmm8, -16*8(%rbp)
movaps %xmm9, -16*7(%rbp)
movaps %xmm10, -16*6(%rbp)
movaps %xmm11, -16*5(%rbp)
movaps %xmm12, -16*4(%rbp)
movaps %xmm13, -16*3(%rbp)
movaps %xmm14, -16*2(%rbp)
movaps %xmm15, -16*1(%rbp)
___
$code.=<<___;
mov %rbp, %rsp
pop %rbp
ret
.size ecp_nistz256_avx2_point_add_affines_x4,.-ecp_nistz256_avx2_point_add_affines_x4
################################################################################
# void ecp_nistz256_avx2_to_mont(void* RESULTx4, void *Ax4);
.globl ecp_nistz256_avx2_to_mont
.type ecp_nistz256_avx2_to_mont,\@function,2
.align 32
ecp_nistz256_avx2_to_mont:
vzeroupper
___
$code.=<<___ if ($win64);
lea -8-16*10(%rsp), %rsp
vmovaps %xmm6, -8-16*10(%rax)
vmovaps %xmm7, -8-16*9(%rax)
vmovaps %xmm8, -8-16*8(%rax)
vmovaps %xmm9, -8-16*7(%rax)
vmovaps %xmm10, -8-16*6(%rax)
vmovaps %xmm11, -8-16*5(%rax)
vmovaps %xmm12, -8-16*4(%rax)
vmovaps %xmm13, -8-16*3(%rax)
vmovaps %xmm14, -8-16*2(%rax)
vmovaps %xmm15, -8-16*1(%rax)
___
$code.=<<___;
vmovdqa .LAVX2_AND_MASK(%rip), $AND_MASK
lea .LTO_MONT_AVX2(%rip), %rdx
call avx2_mul_x4
call avx2_normalize_n_store
vzeroupper
___
$code.=<<___ if ($win64);
movaps 16*0(%rsp), %xmm6
movaps 16*1(%rsp), %xmm7
movaps 16*2(%rsp), %xmm8
movaps 16*3(%rsp), %xmm9
movaps 16*4(%rsp), %xmm10
movaps 16*5(%rsp), %xmm11
movaps 16*6(%rsp), %xmm12
movaps 16*7(%rsp), %xmm13
movaps 16*8(%rsp), %xmm14
movaps 16*9(%rsp), %xmm15
lea 8+16*10(%rsp), %rsp
___
$code.=<<___;
ret
.size ecp_nistz256_avx2_to_mont,.-ecp_nistz256_avx2_to_mont
################################################################################
# void ecp_nistz256_avx2_from_mont(void* RESULTx4, void *Ax4);
.globl ecp_nistz256_avx2_from_mont
.type ecp_nistz256_avx2_from_mont,\@function,2
.align 32
ecp_nistz256_avx2_from_mont:
vzeroupper
___
$code.=<<___ if ($win64);
lea -8-16*10(%rsp), %rsp
vmovaps %xmm6, -8-16*10(%rax)
vmovaps %xmm7, -8-16*9(%rax)
vmovaps %xmm8, -8-16*8(%rax)
vmovaps %xmm9, -8-16*7(%rax)
vmovaps %xmm10, -8-16*6(%rax)
vmovaps %xmm11, -8-16*5(%rax)
vmovaps %xmm12, -8-16*4(%rax)
vmovaps %xmm13, -8-16*3(%rax)
vmovaps %xmm14, -8-16*2(%rax)
vmovaps %xmm15, -8-16*1(%rax)
___
$code.=<<___;
vmovdqa .LAVX2_AND_MASK(%rip), $AND_MASK
lea .LFROM_MONT_AVX2(%rip), %rdx
call avx2_mul_x4
call avx2_normalize_n_store
vzeroupper
___
$code.=<<___ if ($win64);
movaps 16*0(%rsp), %xmm6
movaps 16*1(%rsp), %xmm7
movaps 16*2(%rsp), %xmm8
movaps 16*3(%rsp), %xmm9
movaps 16*4(%rsp), %xmm10
movaps 16*5(%rsp), %xmm11
movaps 16*6(%rsp), %xmm12
movaps 16*7(%rsp), %xmm13
movaps 16*8(%rsp), %xmm14
movaps 16*9(%rsp), %xmm15
lea 8+16*10(%rsp), %rsp
___
$code.=<<___;
ret
.size ecp_nistz256_avx2_from_mont,.-ecp_nistz256_avx2_from_mont
################################################################################
# void ecp_nistz256_avx2_set1(void* RESULTx4);
.globl ecp_nistz256_avx2_set1
.type ecp_nistz256_avx2_set1,\@function,1
.align 32
ecp_nistz256_avx2_set1:
lea .LONE+128(%rip), %rax
lea 128(%rdi), %rdi
vzeroupper
vmovdqa 32*0-128(%rax), %ymm0
vmovdqa 32*1-128(%rax), %ymm1
vmovdqa 32*2-128(%rax), %ymm2
vmovdqa 32*3-128(%rax), %ymm3
vmovdqa 32*4-128(%rax), %ymm4
vmovdqa 32*5-128(%rax), %ymm5
vmovdqa %ymm0, 32*0-128(%rdi)
vmovdqa 32*6-128(%rax), %ymm0
vmovdqa %ymm1, 32*1-128(%rdi)
vmovdqa 32*7-128(%rax), %ymm1
vmovdqa %ymm2, 32*2-128(%rdi)
vmovdqa 32*8-128(%rax), %ymm2
vmovdqa %ymm3, 32*3-128(%rdi)
vmovdqa %ymm4, 32*4-128(%rdi)
vmovdqa %ymm5, 32*5-128(%rdi)
vmovdqa %ymm0, 32*6-128(%rdi)
vmovdqa %ymm1, 32*7-128(%rdi)
vmovdqa %ymm2, 32*8-128(%rdi)
vzeroupper
ret
.size ecp_nistz256_avx2_set1,.-ecp_nistz256_avx2_set1
___
}
{
################################################################################
# void ecp_nistz256_avx2_multi_gather_w7(void* RESULT, void *in,
# int index0, int index1, int index2, int index3);
################################################################################
my ($val,$in_t,$index0,$index1,$index2,$index3)=("%rdi","%rsi","%edx","%ecx","%r8d","%r9d");
my ($INDEX0,$INDEX1,$INDEX2,$INDEX3)=map("%ymm$_",(0..3));
my ($R0a,$R0b,$R1a,$R1b,$R2a,$R2b,$R3a,$R3b)=map("%ymm$_",(4..11));
my ($M0,$T0,$T1,$TMP0)=map("%ymm$_",(12..15));
$code.=<<___;
.globl ecp_nistz256_avx2_multi_gather_w7
.type ecp_nistz256_avx2_multi_gather_w7,\@function,6
.align 32
ecp_nistz256_avx2_multi_gather_w7:
vzeroupper
___
$code.=<<___ if ($win64);
lea -8-16*10(%rsp), %rsp
vmovaps %xmm6, -8-16*10(%rax)
vmovaps %xmm7, -8-16*9(%rax)
vmovaps %xmm8, -8-16*8(%rax)
vmovaps %xmm9, -8-16*7(%rax)
vmovaps %xmm10, -8-16*6(%rax)
vmovaps %xmm11, -8-16*5(%rax)
vmovaps %xmm12, -8-16*4(%rax)
vmovaps %xmm13, -8-16*3(%rax)
vmovaps %xmm14, -8-16*2(%rax)
vmovaps %xmm15, -8-16*1(%rax)
___
$code.=<<___;
lea .LIntOne(%rip), %rax
vmovd $index0, %xmm0
vmovd $index1, %xmm1
vmovd $index2, %xmm2
vmovd $index3, %xmm3
vpxor $R0a, $R0a, $R0a
vpxor $R0b, $R0b, $R0b
vpxor $R1a, $R1a, $R1a
vpxor $R1b, $R1b, $R1b
vpxor $R2a, $R2a, $R2a
vpxor $R2b, $R2b, $R2b
vpxor $R3a, $R3a, $R3a
vpxor $R3b, $R3b, $R3b
vmovdqa (%rax), $M0
vpermd $INDEX0, $R0a, $INDEX0
vpermd $INDEX1, $R0a, $INDEX1
vpermd $INDEX2, $R0a, $INDEX2
vpermd $INDEX3, $R0a, $INDEX3
mov \$64, %ecx
lea 112($val), $val # size optimization
jmp .Lmulti_select_loop_avx2
# INDEX=0, corresponds to the point at infty (0,0)
.align 32
.Lmulti_select_loop_avx2:
vpcmpeqd $INDEX0, $M0, $TMP0
vmovdqa `32*0+32*64*2*0`($in_t), $T0
vmovdqa `32*1+32*64*2*0`($in_t), $T1
vpand $TMP0, $T0, $T0
vpand $TMP0, $T1, $T1
vpxor $T0, $R0a, $R0a
vpxor $T1, $R0b, $R0b
vpcmpeqd $INDEX1, $M0, $TMP0
vmovdqa `32*0+32*64*2*1`($in_t), $T0
vmovdqa `32*1+32*64*2*1`($in_t), $T1
vpand $TMP0, $T0, $T0
vpand $TMP0, $T1, $T1
vpxor $T0, $R1a, $R1a
vpxor $T1, $R1b, $R1b
vpcmpeqd $INDEX2, $M0, $TMP0
vmovdqa `32*0+32*64*2*2`($in_t), $T0
vmovdqa `32*1+32*64*2*2`($in_t), $T1
vpand $TMP0, $T0, $T0
vpand $TMP0, $T1, $T1
vpxor $T0, $R2a, $R2a
vpxor $T1, $R2b, $R2b
vpcmpeqd $INDEX3, $M0, $TMP0
vmovdqa `32*0+32*64*2*3`($in_t), $T0
vmovdqa `32*1+32*64*2*3`($in_t), $T1
vpand $TMP0, $T0, $T0
vpand $TMP0, $T1, $T1
vpxor $T0, $R3a, $R3a
vpxor $T1, $R3b, $R3b
vpaddd (%rax), $M0, $M0 # increment
lea 32*2($in_t), $in_t
dec %ecx
jnz .Lmulti_select_loop_avx2
vmovdqu $R0a, 32*0-112($val)
vmovdqu $R0b, 32*1-112($val)
vmovdqu $R1a, 32*2-112($val)
vmovdqu $R1b, 32*3-112($val)
vmovdqu $R2a, 32*4-112($val)
vmovdqu $R2b, 32*5-112($val)
vmovdqu $R3a, 32*6-112($val)
vmovdqu $R3b, 32*7-112($val)
vzeroupper
___
$code.=<<___ if ($win64);
movaps 16*0(%rsp), %xmm6
movaps 16*1(%rsp), %xmm7
movaps 16*2(%rsp), %xmm8
movaps 16*3(%rsp), %xmm9
movaps 16*4(%rsp), %xmm10
movaps 16*5(%rsp), %xmm11
movaps 16*6(%rsp), %xmm12
movaps 16*7(%rsp), %xmm13
movaps 16*8(%rsp), %xmm14
movaps 16*9(%rsp), %xmm15
lea 8+16*10(%rsp), %rsp
___
$code.=<<___;
ret
.size ecp_nistz256_avx2_multi_gather_w7,.-ecp_nistz256_avx2_multi_gather_w7
.extern OPENSSL_ia32cap_P
.globl ecp_nistz_avx2_eligible
.type ecp_nistz_avx2_eligible,\@abi-omnipotent
.align 32
ecp_nistz_avx2_eligible:
mov OPENSSL_ia32cap_P+8(%rip),%eax
shr \$5,%eax # AVX2 is bit 5 of the third OPENSSL_ia32cap_P word
and \$1,%eax
ret
.size ecp_nistz_avx2_eligible,.-ecp_nistz_avx2_eligible
___
}
}} else {{ # assembler is too old
$code.=<<___;
.text
.globl ecp_nistz256_avx2_transpose_convert
.globl ecp_nistz256_avx2_convert_transpose_back
.globl ecp_nistz256_avx2_point_add_affine_x4
.globl ecp_nistz256_avx2_point_add_affines_x4
.globl ecp_nistz256_avx2_to_mont
.globl ecp_nistz256_avx2_from_mont
.globl ecp_nistz256_avx2_set1
.globl ecp_nistz256_avx2_multi_gather_w7
.type ecp_nistz256_avx2_multi_gather_w7,\@abi-omnipotent
ecp_nistz256_avx2_transpose_convert:
ecp_nistz256_avx2_convert_transpose_back:
ecp_nistz256_avx2_point_add_affine_x4:
ecp_nistz256_avx2_point_add_affines_x4:
ecp_nistz256_avx2_to_mont:
ecp_nistz256_avx2_from_mont:
ecp_nistz256_avx2_set1:
ecp_nistz256_avx2_multi_gather_w7:
.byte 0x0f,0x0b # ud2
ret
.size ecp_nistz256_avx2_multi_gather_w7,.-ecp_nistz256_avx2_multi_gather_w7
.globl ecp_nistz_avx2_eligible
.type ecp_nistz_avx2_eligible,\@abi-omnipotent
ecp_nistz_avx2_eligible:
xor %eax,%eax
ret
.size ecp_nistz_avx2_eligible,.-ecp_nistz_avx2_eligible
___
}}
# expand the backtick-quoted compile-time expressions in the generated code, then print it
foreach (split("\n",$code)) {
s/\`([^\`]*)\`/eval($1)/geo;
print $_,"\n";
}
close STDOUT;
| 26.951946 | 109 | 0.624476 |
ed278b43f8f0be223aac8bda87b8ba5835942c66 | 2,601 | pm | Perl | lib/WWW/PipeViewer/RegularExpressions.pm | Nikola2222/pipe-viewer | b45293b605efcab47d86ef59c9263c753687737e | [
"Artistic-2.0"
] | null | null | null | lib/WWW/PipeViewer/RegularExpressions.pm | Nikola2222/pipe-viewer | b45293b605efcab47d86ef59c9263c753687737e | [
"Artistic-2.0"
] | null | null | null | lib/WWW/PipeViewer/RegularExpressions.pm | Nikola2222/pipe-viewer | b45293b605efcab47d86ef59c9263c753687737e | [
"Artistic-2.0"
] | null | null | null | package WWW::PipeViewer::RegularExpressions;
use utf8;
use 5.014;
use warnings;
require Exporter;
our @ISA = qw(Exporter);
=head1 NAME
WWW::PipeViewer::RegularExpressions - Various utils.
=head1 SYNOPSIS
use WWW::PipeViewer::RegularExpressions;
use WWW::PipeViewer::RegularExpressions ($get_video_id_re);
=cut
my $opt_begin_chars = q{:;=}; # stdin option valid begin chars
# Options
our $range_num_re = qr{^([0-9]{1,3}+)(?>-|\.\.)([0-9]{1,3}+)?\z};
our $digit_or_equal_re = qr/(?(?=[1-9])|=)/;
our $non_digit_or_opt_re = qr{^(?!$range_num_re)(?>[0-9]{1,3}[^0-9]|[0-9]{4}|[^0-9$opt_begin_chars])};
# Generic name
my $generic_name_re = qr/[a-zA-Z0-9_.\-]{11,64}/;
our $valid_channel_id_re = qr{^(?:.*/(?:channel|c)/)?(?<channel_id>(?:[%\w]+(?:[-.]++[%\w]++)*|$generic_name_re))(?:/.*)?\z};
our $get_channel_videos_id_re = qr{^.*/(?:channel|c)/(?<channel_id>(?:[%\w]+(?:[-.]++[%\w]++)*|$generic_name_re))};
our $get_channel_playlists_id_re = qr{$get_channel_videos_id_re/playlists};
our $get_username_videos_re = qr{^.*/user/(?<username>[-.\w]+)};
our $get_username_playlists_re = qr{$get_username_videos_re/playlists};
# Video ID
my $video_id_re = qr/[0-9A-Za-z_\-]{11}/;
our $valid_video_id_re = qr{^$video_id_re\z};
our $get_video_id_re = qr{(?:%3F|%2F|\b)(?>v|embed|shorts|youtu(?:\\)?[.]be)(?>(?:\\)?[=/]|%3D|%2F)(?<video_id>$video_id_re)};
# Playlist ID
our $valid_playlist_id_re = qr{^$generic_name_re\z};
our $get_playlist_id_re = qr{(?:(?:(?>playlist\?list|view_play_list\?p|list)=)|\w#p/c/)(?<playlist_id>$generic_name_re)\b};
our $valid_opt_re = qr{^[$opt_begin_chars]([A-Za-z]++(?:-[A-Za-z]++)?(?>${digit_or_equal_re}.*)?)$};
our @EXPORT = qw(
$range_num_re
$digit_or_equal_re
$non_digit_or_opt_re
$valid_channel_id_re
$valid_video_id_re
$get_video_id_re
$valid_playlist_id_re
$get_playlist_id_re
$valid_opt_re
$get_channel_videos_id_re
$get_channel_playlists_id_re
$get_username_videos_re
$get_username_playlists_re
);
=head1 AUTHOR
Trizen, C<< <echo dHJpemVuQHByb3Rvbm1haWwuY29tCg== | base64 -d> >>
=head1 SUPPORT
You can find documentation for this module with the perldoc command.
perldoc WWW::PipeViewer::RegularExpressions
=head1 LICENSE AND COPYRIGHT
Copyright 2012-2013 Trizen.
This program is free software; you can redistribute it and/or modify it
under the terms of either: the GNU General Public License as published
by the Free Software Foundation; or the Artistic License.
See L<https://dev.perl.org/licenses/> for more information.
=cut
1; # End of WWW::PipeViewer::RegularExpressions
| 28.9 | 128 | 0.685121 |
edd9683edaf58ad45e1f86b8f3e5c85ca7659bb5 | 26,844 | pm | Perl | lib/perl/lib/Apache/TS/AdminClient.pm | syucream/trafficserver | 144b2ef0a80c154ae132c9e73e94a5b4596c2b81 | [
"Apache-2.0"
] | null | null | null | lib/perl/lib/Apache/TS/AdminClient.pm | syucream/trafficserver | 144b2ef0a80c154ae132c9e73e94a5b4596c2b81 | [
"Apache-2.0"
] | null | null | null | lib/perl/lib/Apache/TS/AdminClient.pm | syucream/trafficserver | 144b2ef0a80c154ae132c9e73e94a5b4596c2b81 | [
"Apache-2.0"
] | null | null | null | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
package Apache::TS::AdminClient;
use warnings;
use strict;
require 5.006;
use Carp;
use IO::Socket::UNIX;
use IO::Select;
use Apache::TS;
# Mgmt API command constants, should track ts/mgmtapi.h
use constant {
TS_FILE_READ => 0,
TS_FILE_WRITE => 1,
TS_RECORD_SET => 2,
TS_RECORD_GET => 3,
TS_PROXY_STATE_GET => 4,
TS_PROXY_STATE_SET => 5,
TS_RECONFIGURE => 6,
TS_RESTART => 7,
TS_BOUNCE => 8,
TS_EVENT_RESOLVE => 9,
TS_EVENT_GET_MLT => 10,
TS_EVENT_ACTIVE => 11,
TS_EVENT_REG_CALLBACK => 12,
TS_EVENT_UNREG_CALLBACK => 13,
TS_EVENT_NOTIFY => 14,
TS_SNAPSHOT_TAKE => 15,
TS_SNAPSHOT_RESTORE => 16,
TS_SNAPSHOT_REMOVE => 17,
TS_SNAPSHOT_GET_MLT => 18,
TS_DIAGS => 19,
TS_STATS_RESET => 20
};
# We treat both REC_INT and REC_COUNTER the same here
use constant {
TS_REC_INT => 0,
TS_REC_COUNTER => 0,
TS_REC_FLOAT => 2,
TS_REC_STRING => 3
};
use constant {
TS_ERR_OKAY => 0,
TS_ERR_READ_FILE => 1,
TS_ERR_WRITE_FILE => 2,
TS_ERR_PARSE_CONFIG_RULE => 3,
TS_ERR_INVALID_CONFIG_RULE => 4,
TS_ERR_NET_ESTABLISH => 5,
TS_ERR_NET_READ => 6,
TS_ERR_NET_WRITE => 7,
TS_ERR_NET_EOF => 8,
TS_ERR_NET_TIMEOUT => 9,
TS_ERR_SYS_CALL => 10,
TS_ERR_PARAMS => 11,
TS_ERR_FAIL => 12
};
# Semi-intelligent way of finding the mgmtapi socket.
sub _find_socket {
my $path = shift || "";
my $name = shift || "mgmtapisocket";
my @sockets_def = (
$path,
Apache::TS::PREFIX . '/' . Apache::TS::REL_RUNTIMEDIR . '/' . 'mgmtapisocket',
'/usr/local/var/trafficserver',
'/usr/local/var/run/trafficserver',
'/usr/local/var/run',
'/var/trafficserver',
'/var/run/trafficserver',
'/var/run',
'/opt/ats/var/trafficserver',
);
foreach my $socket (@sockets_def) {
return $socket if (-S $socket);
return "${socket}/${name}" if (-S "${socket}/${name}");
}
return undef;
}
#
# Constructor
#
sub new {
my ($class, %args) = @_;
my $self = {};
$self->{_socket_path} = _find_socket($args{socket_path});
$self->{_socket} = undef;
croak
"Unable to locate socket, please pass socket_path with the management api socket location to Apache::TS::AdminClient"
if (!$self->{_socket_path});
if ((!-r $self->{_socket_path}) or (!-w $self->{_socket_path}) or (!-S $self->{_socket_path})) {
croak "Unable to open $self->{_socket_path} for reads or writes";
}
$self->{_select} = IO::Select->new();
bless $self, $class;
$self->open_socket();
return $self;
}
#
# Destructor
#
sub DESTROY {
my $self = shift;
return $self->close_socket();
}
#
# Open the socket (Unix domain)
#
sub open_socket {
my $self = shift;
my %args = @_;
if (defined($self->{_socket})) {
if ($args{force} || $args{reopen}) {
$self->close_socket();
}
else {
return undef;
}
}
$self->{_socket} = IO::Socket::UNIX->new(
Type => SOCK_STREAM,
Peer => $self->{_socket_path}
) or croak("Error opening socket - $@");
return undef unless defined($self->{_socket});
$self->{_select}->add($self->{_socket});
return $self;
}
sub close_socket {
my $self = shift;
# if socket doesn't exist, return as there's nothing to do.
return unless defined($self->{_socket});
# gracefully close socket.
$self->{_select}->remove($self->{_socket});
$self->{_socket}->close();
$self->{_socket} = undef;
return $self;
}
#
# Do read()s on our Unix domain socket; takes an optional timeout in seconds (default is 1ms).
#
sub _do_read {
my $self = shift;
my $timeout = shift || 1/1000.0; # 1ms by default
my $res = "";
while ($self->{_select}->can_read($timeout)) {
my $rc = $self->{_socket}->sysread($res, 1024, length($res));
}
return $res || undef;
}
#
# Get (read) a stat or config value out of the local manager. Note that the
# assumption is that you are calling this with an existing record name.
#
sub get_stat {
my ($self, $stat) = @_;
my $res = "";
my $max_read_attempts = 25;
return undef unless defined($self->{_socket});
return undef unless $self->{_select}->can_write(10);
# This is a total hack for now, we need to wrap this into the proper mgmt API library.
# Request: 16-bit op code (TS_RECORD_GET), 32-bit record name length, then the name itself.
$self->{_socket}->print(pack("sla*", TS_RECORD_GET, length($stat)), $stat);
$res = $self->_do_read();
# Response starts with a 16-bit status code; the fourth field is the record data type.
my @resp = unpack("slls", $res);
return undef unless (scalar(@resp) == 4);
if ($resp[0] == TS_ERR_OKAY) {
if ($resp[3] < TS_REC_FLOAT) {
@resp = unpack("sllsq", $res);
return undef unless (scalar(@resp) == 5);
return int($resp[4]);
}
elsif ($resp[3] == TS_REC_FLOAT) {
@resp = unpack("sllsf", $res);
return undef unless (scalar(@resp) == 5);
return $resp[4];
}
elsif ($resp[3] == TS_REC_STRING) {
@resp = unpack("sllsa*", $res);
return undef unless (scalar(@resp) == 5);
my @result = split($stat, $resp[4]);
return $result[0];
}
}
return undef;
}
*get_config = \&get_stat;
1;
__END__
#-=-=-=-=-=-=-=-= Give us some POD please =-=-=-=-=-=-=-=-
=head1 NAME:
Apache::TS::AdminClient - a perl interface to the statistics and configuration settings stored within Apache Traffic Server.
=head1 SYNOPSIS
#!/usr/bin/perl
use Apache::TS::AdminClient;
my $cli = Apache::TS::AdminClient->new(%input);
my $string = $cli->get_stat("proxy.config.product_company");
print "$string\n";
=head1 DESCRIPTION:
AdminClient opens a connection to a Unix domain socket on the local disk. Once the connection is established,
AdminClient writes requests to the socket and waits for Apache Traffic Server to return a response. Valid
request strings can be found in RecordsConfig.cc, which is included with the Apache Traffic Server source.
A list of valid request strings is included with this documentation, but it may not be complete,
as future releases of Apache Traffic Server may add new request strings or remove existing ones.
=head1 CONSTRUCTOR
When an object of this module is created, it assumes the Unix domain socket is at the default location of
the Apache Traffic Server installation. This can be changed when creating the object by setting B<'socket_path'>.
For example:
=over 4
=item my $cli = AdminClient->new(socket_path=> "/var/trafficserver");
This would make the module look for the Unix domain socket in the directory '/var/trafficserver'. The path
can optionally include the name of the socket file; without it, the constructor defaults to 'mgmtapisocket'.
=back
=head1 PUBLIC METHODS
To read a single metric (or configuration), two APIs are available:
=over 4
=item $cli->get_stat($stats_name);
=item $cli->get_config($config_name);
This will return a (scalar) value for this metric or configuration.
=back
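A slightly larger sketch, using record names taken from the list further down (either method works for any record, and both return undef if the record cannot be read):

    use Apache::TS::AdminClient;

    my $cli = Apache::TS::AdminClient->new();
    my $threads = $cli->get_config("proxy.config.exec_thread.limit");
    my $company = $cli->get_stat("proxy.config.product_company");
    print "exec threads: $threads, company: $company\n";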
=head1 traffic_line
Apache Traffic Server ships with a command line tool called traffic_line which overlaps with this module: traffic_line
can read and write the same statistics and config settings that this module can read. So if you don't want to write a perl one-liner to
get at this information, traffic_line is your tool.
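If you do want the one-liner, it could look something like this (the record name is just an example from the list below):

    perl -MApache::TS::AdminClient \
        -e 'print Apache::TS::AdminClient->new->get_stat($ARGV[0]), "\n"' \
        proxy.config.product_name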
=head1 List of configurations
The Apache Traffic Server Administration Manual will explain what these strings represent. (http://trafficserver.apache.org/docs/)
proxy.config.accept_threads
proxy.config.task_threads
proxy.config.admin.admin_user
proxy.config.admin.autoconf.localhost_only
proxy.config.admin.autoconf.pac_filename
proxy.config.admin.autoconf_port
proxy.config.admin.autoconf.doc_root
proxy.config.admin.cli_path
proxy.config.admin.number_config_bak
proxy.config.admin.user_id
proxy.config.alarm.abs_path
proxy.config.alarm.bin
proxy.config.alarm_email
proxy.config.alarm.script_runtime
proxy.config.bandwidth_mgmt.filename
proxy.config.bin_path
proxy.config.body_factory.enable_customizations
proxy.config.body_factory.enable_logging
proxy.config.body_factory.response_suppression_mode
proxy.config.body_factory.template_sets_dir
proxy.config.cache.agg_write_backlog
proxy.config.cache.alt_rewrite_max_size
proxy.config.cache.control.filename
proxy.config.cache.dir.sync_frequency
proxy.config.cache.enable_checksum
proxy.config.cache.enable_read_while_writer
proxy.config.cache.hostdb.disable_reverse_lookup
proxy.config.cache.hostdb.sync_frequency
proxy.config.cache.hosting_filename
proxy.config.cache.ip_allow.filename
proxy.config.cache.limits.http.max_alts
proxy.config.cache.max_disk_errors
proxy.config.cache.max_doc_size
proxy.config.cache.min_average_object_size
proxy.config.cache.volume_filename
proxy.config.cache.permit.pinning
proxy.config.cache.ram_cache_cutoff
proxy.config.cache.ram_cache.size
proxy.config.cache.select_alternate
proxy.config.cache.storage_filename
proxy.config.cache.threads_per_disk
proxy.config.cache.url_hash_method
proxy.config.cache.vary_on_user_agent
proxy.config.cache.mutex_retry_delay
proxy.config.cluster.cluster_configuration
proxy.config.cluster.cluster_load_clear_duration
proxy.config.cluster.cluster_load_exceed_duration
proxy.config.cluster.cluster_port
proxy.config.cluster.delta_thresh
proxy.config.cluster.enable_monitor
proxy.config.cluster.ethernet_interface
proxy.config.cluster.load_compute_interval_msecs
proxy.config.cluster.load_monitor_enabled
proxy.config.cluster.log_bogus_mc_msgs
proxy.config.cluster.mc_group_addr
proxy.config.cluster.mcport
proxy.config.cluster.mc_ttl
proxy.config.cluster.monitor_interval_secs
proxy.config.cluster.msecs_per_ping_response_bucket
proxy.config.cluster.peer_timeout
proxy.config.cluster.periodic_timer_interval_msecs
proxy.config.cluster.ping_history_buf_length
proxy.config.cluster.ping_latency_threshold_msecs
proxy.config.cluster.ping_response_buckets
proxy.config.cluster.ping_send_interval_msecs
proxy.config.cluster.receive_buffer_size
proxy.config.cluster.rpc_cache_cluster
proxy.config.cluster.rsport
proxy.config.cluster.send_buffer_size
proxy.config.cluster.sock_option_flag
proxy.config.cluster.startup_timeout
proxy.config.cluster.threads
proxy.config.config_dir
proxy.config.cop.core_signal
proxy.config.cop.linux_min_memfree_kb
proxy.config.cop.linux_min_swapfree_kb
proxy.config.core_limit
proxy.config.diags.action.enabled
proxy.config.diags.action.tags
proxy.config.diags.debug.enabled
proxy.config.diags.debug.tags
proxy.config.diags.output.alert
proxy.config.diags.output.debug
proxy.config.diags.output.diag
proxy.config.diags.output.emergency
proxy.config.diags.output.error
proxy.config.diags.output.fatal
proxy.config.diags.output.note
proxy.config.diags.output.status
proxy.config.diags.output.warning
proxy.config.diags.show_location
proxy.config.dns.failover_number
proxy.config.dns.failover_period
proxy.config.dns.lookup_timeout
proxy.config.dns.max_dns_in_flight
proxy.config.dns.nameservers
proxy.config.dns.resolv_conf
proxy.config.dns.retries
proxy.config.dns.round_robin_nameservers
proxy.config.dns.search_default_domains
proxy.config.dns.splitDNS.enabled
proxy.config.dns.splitdns.filename
proxy.config.dns.url_expansions
proxy.config.dump_mem_info_frequency
proxy.config.env_prep
proxy.config.exec_thread.autoconfig
proxy.config.exec_thread.autoconfig.scale
proxy.config.exec_thread.limit
proxy.config.header.parse.no_host_url_redirect
proxy.config.hostdb
proxy.config.hostdb.cluster
proxy.config.hostdb.cluster.round_robin
proxy.config.hostdb.fail.timeout
proxy.config.hostdb.filename
proxy.config.hostdb.lookup_timeout
proxy.config.hostdb.migrate_on_demand
proxy.config.hostdb.re_dns_on_reload
proxy.config.hostdb.serve_stale_for
proxy.config.hostdb.size
proxy.config.hostdb.storage_path
proxy.config.hostdb.storage_size
proxy.config.hostdb.strict_round_robin
proxy.config.hostdb.timeout
proxy.config.hostdb.ttl_mode
proxy.config.hostdb.verify_after
proxy.config.http.accept_encoding_filter.filename
proxy.config.http.accept_no_activity_timeout
proxy.config.http.anonymize_insert_client_ip
proxy.config.http.anonymize_other_header_list
proxy.config.http.anonymize_remove_client_ip
proxy.config.http.anonymize_remove_cookie
proxy.config.http.anonymize_remove_from
proxy.config.http.anonymize_remove_referer
proxy.config.http.anonymize_remove_user_agent
proxy.config.http.background_fill_active_timeout
proxy.config.http.background_fill_completed_threshold
proxy.config.http.cache.cache_responses_to_cookies
proxy.config.http.cache.cache_urls_that_look_dynamic
proxy.config.http.cache.enable_default_vary_headers
proxy.config.http.cache.fuzz.min_time
proxy.config.http.cache.fuzz.probability
proxy.config.http.cache.fuzz.time
proxy.config.http.cache.guaranteed_max_lifetime
proxy.config.http.cache.guaranteed_min_lifetime
proxy.config.http.cache.heuristic_lm_factor
proxy.config.http.cache.heuristic_max_lifetime
proxy.config.http.cache.heuristic_min_lifetime
proxy.config.http.cache.http
proxy.config.http.cache.ignore_accept_charset_mismatch
proxy.config.http.cache.ignore_accept_encoding_mismatch
proxy.config.http.cache.ignore_accept_language_mismatch
proxy.config.http.cache.ignore_accept_mismatch
proxy.config.http.cache.ignore_authentication
proxy.config.http.cache.ignore_client_cc_max_age
proxy.config.http.cache.cluster_cache_local
proxy.config.http.cache.ignore_client_no_cache
proxy.config.http.cache.ignore_server_no_cache
proxy.config.http.cache.ims_on_client_no_cache
proxy.config.http.cache.max_open_read_retries
proxy.config.http.cache.max_open_write_retries
proxy.config.http.cache.max_stale_age
proxy.config.http.cache.open_read_retry_time
proxy.config.http.cache.range.lookup
proxy.config.http.cache.required_headers
proxy.config.http.cache.vary_default_images
proxy.config.http.cache.vary_default_other
proxy.config.http.cache.vary_default_text
proxy.config.http.cache.when_to_add_no_cache_to_msie_requests
proxy.config.http.cache.when_to_revalidate
proxy.config.http.chunking_enabled
proxy.config.http.congestion_control.default.client_wait_interval
proxy.config.http.congestion_control.default.congestion_scheme
proxy.config.http.congestion_control.default.dead_os_conn_retries
proxy.config.http.congestion_control.default.dead_os_conn_timeout
proxy.config.http.congestion_control.default.error_page
proxy.config.http.congestion_control.default.fail_window
proxy.config.http.congestion_control.default.live_os_conn_retries
proxy.config.http.congestion_control.default.live_os_conn_timeout
proxy.config.http.congestion_control.default.max_connection
proxy.config.http.congestion_control.default.max_connection_failures
proxy.config.http.congestion_control.default.proxy_retry_interval
proxy.config.http.congestion_control.default.wait_interval_alpha
proxy.config.http.congestion_control.enabled
proxy.config.http.congestion_control.filename
proxy.config.http.congestion_control.localtime
proxy.config.http.connect_attempts_max_retries
proxy.config.http.connect_attempts_max_retries_dead_server
proxy.config.http.connect_attempts_rr_retries
proxy.config.http.connect_attempts_timeout
proxy.config.http.connect_ports
proxy.config.http.default_buffer_size
proxy.config.http.default_buffer_water_mark
proxy.config.http.doc_in_cache_skip_dns
proxy.config.http.down_server.abort_threshold
proxy.config.http.down_server.cache_time
proxy.config.http.enabled
proxy.config.http.enable_http_info
proxy.config.http.enable_http_stats
proxy.config.http.enable_url_expandomatic
proxy.config.http.errors.log_error_pages
proxy.config.http.forward.proxy_auth_to_parent
proxy.config.http.global_user_agent_header
proxy.config.http.insert_age_in_response
proxy.config.http.insert_request_via_str
proxy.config.http.insert_response_via_str
proxy.config.http.insert_squid_x_forwarded_for
proxy.config.http.keep_alive_enabled_in
proxy.config.http.keep_alive_enabled_out
proxy.config.http.keep_alive_no_activity_timeout_in
proxy.config.http.keep_alive_no_activity_timeout_out
proxy.config.http.keep_alive_post_out
proxy.config.http.negative_caching_enabled
proxy.config.http.negative_caching_lifetime
proxy.config.http.negative_revalidating_enabled
proxy.config.http.negative_revalidating_lifetime
proxy.config.http.no_dns_just_forward_to_parent
proxy.config.http.no_origin_server_dns
proxy.config.http.normalize_ae_gzip
proxy.config.http.number_of_redirections
proxy.config.http.origin_max_connections
proxy.config.http.origin_min_keep_alive_connections
proxy.config.http.parent_proxies
proxy.config.http.parent_proxy.connect_attempts_timeout
proxy.config.http.parent_proxy.fail_threshold
proxy.config.http.parent_proxy.file
proxy.config.http.parent_proxy.per_parent_connect_attempts
proxy.config.http.parent_proxy.retry_time
proxy.config.http.parent_proxy_routing_enable
proxy.config.http.parent_proxy.total_connect_attempts
proxy.config.http.post_connect_attempts_timeout
proxy.config.http.post_copy_size
proxy.config.http.push_method_enabled
proxy.config.http.quick_filter.mask
proxy.config.http.record_heartbeat
proxy.config.http.record_tcp_mem_hit
proxy.config.http.redirection_enabled
proxy.config.http.referer_default_redirect
proxy.config.http.referer_filter
proxy.config.http.referer_format_redirect
proxy.config.http.request_header_max_size
proxy.config.http.request_via_str
proxy.config.http.response_header_max_size
proxy.config.http.response_server_enabled
proxy.config.http.response_server_str
proxy.config.http.response_via_str
proxy.config.http.send_http11_requests
proxy.config.http.server_max_connections
proxy.config.http.server_port
proxy.config.http.server_port_attr
proxy.config.http.share_server_sessions
proxy.config.http.slow.log.threshold
proxy.config.http.connect_ports
proxy.config.http.transaction_active_timeout_in
proxy.config.http.transaction_active_timeout_out
proxy.config.http.transaction_no_activity_timeout_in
proxy.config.http.transaction_no_activity_timeout_out
proxy.config.http_ui_enabled
proxy.config.http.uncacheable_requests_bypass_parent
proxy.config.icp.default_reply_port
proxy.config.icp.enabled
proxy.config.icp.icp_configuration
proxy.config.icp.icp_interface
proxy.config.icp.icp_port
proxy.config.icp.lookup_local
proxy.config.icp.multicast_enabled
proxy.config.icp.query_timeout
proxy.config.icp.reply_to_unknown_peer
proxy.config.icp.stale_icp_enabled
proxy.config.io.max_buffer_size
proxy.config.lm.pserver_timeout_msecs
proxy.config.lm.pserver_timeout_secs
proxy.config.lm.sem_id
proxy.config.local_state_dir
proxy.config.log.ascii_buffer_size
proxy.config.log.auto_delete_rolled_files
proxy.config.log.collation_host
proxy.config.log.collation_host_tagged
proxy.config.log.collation_max_send_buffers
proxy.config.log.collation_port
proxy.config.log.collation_retry_sec
proxy.config.log.collation_secret
proxy.config.log.common_log_enabled
proxy.config.log.common_log_header
proxy.config.log.common_log_is_ascii
proxy.config.log.common_log_name
proxy.config.log.custom_logs_enabled
proxy.config.log.extended2_log_enabled
proxy.config.log.extended2_log_header
proxy.config.log.extended2_log_is_ascii
proxy.config.log.extended2_log_name
proxy.config.log.extended_log_enabled
proxy.config.log.extended_log_header
proxy.config.log.extended_log_is_ascii
proxy.config.log.extended_log_name
proxy.config.log.file_stat_frequency
proxy.config.log.hostname
proxy.config.log.hosts_config_file
proxy.config.log.log_buffer_size
proxy.config.log.logfile_dir
proxy.config.log.logfile_perm
proxy.config.log.logging_enabled
proxy.config.log.max_line_size
proxy.config.log.max_secs_per_buffer
proxy.config.log.max_space_mb_for_logs
proxy.config.log.max_space_mb_for_orphan_logs
proxy.config.log.max_space_mb_headroom
proxy.config.log.overspill_report_count
proxy.config.log.rolling_enabled
proxy.config.log.rolling_interval_sec
proxy.config.log.rolling_offset_hr
proxy.config.log.rolling_size_mb
proxy.config.log.sampling_frequency
proxy.config.log.search_log_enabled
proxy.config.log.search_log_filters
proxy.config.log.search_rolling_interval_sec
proxy.config.log.search_server_ip_addr
proxy.config.log.search_server_port
proxy.config.log.search_top_sites
proxy.config.log.search_url_filter
proxy.config.log.separate_host_logs
proxy.config.log.separate_icp_logs
proxy.config.log.space_used_frequency
proxy.config.log.squid_log_enabled
proxy.config.log.squid_log_header
proxy.config.log.squid_log_is_ascii
proxy.config.log.squid_log_name
proxy.config.log.xml_config_file
proxy.config.manager_binary
proxy.config.net.connections_throttle
proxy.config.net.listen_backlog
proxy.config.net_snapshot_filename
proxy.config.net.sock_mss_in
proxy.config.net.sock_option_flag_in
proxy.config.net.sock_option_flag_out
proxy.config.net.sock_recv_buffer_size_in
proxy.config.net.sock_recv_buffer_size_out
proxy.config.net.sock_send_buffer_size_in
proxy.config.net.sock_send_buffer_size_out
proxy.config.net.defer_accept
proxy.config.output.logfile
proxy.config.ping.npacks_to_trans
proxy.config.ping.timeout_sec
proxy.config.plugin.plugin_dir
proxy.config.plugin.plugin_mgmt_dir
proxy.config.prefetch.child_port
proxy.config.prefetch.config_file
proxy.config.prefetch.default_data_proto
proxy.config.prefetch.default_url_proto
proxy.config.prefetch.keepalive_timeout
proxy.config.prefetch.max_object_size
proxy.config.prefetch.max_recursion
proxy.config.prefetch.prefetch_enabled
proxy.config.prefetch.push_cached_objects
proxy.config.prefetch.redirection
proxy.config.prefetch.url_buffer_size
proxy.config.prefetch.url_buffer_timeout
proxy.config.process_manager.enable_mgmt_port
proxy.config.process_manager.mgmt_port
proxy.config.process_manager.timeout
proxy.config.product_company
proxy.config.product_name
proxy.config.product_vendor
proxy.config.proxy.authenticate.basic.realm
proxy.config.proxy_binary
proxy.config.proxy_binary_opts
proxy.config.proxy_name
proxy.config.remap.num_remap_threads
proxy.config.res_track_memory
proxy.config.reverse_proxy.enabled
proxy.config.reverse_proxy.oldasxbehavior
proxy.config.snapshot_dir
proxy.config.socks.accept_enabled
proxy.config.socks.accept_port
proxy.config.socks.connection_attempts
proxy.config.socks.default_servers
proxy.config.socks.http_port
proxy.config.socks.per_server_connection_attempts
proxy.config.socks.server_connect_timeout
proxy.config.socks.server_fail_threshold
proxy.config.socks.server_retry_time
proxy.config.socks.server_retry_timeout
proxy.config.socks.socks_config_file
proxy.config.socks.socks_needed
proxy.config.socks.socks_timeout
proxy.config.socks.socks_version
proxy.config.srv_enabled
proxy.config.ssl.CA.cert.filename
proxy.config.ssl.CA.cert.path
proxy.config.ssl.client.CA.cert.filename
proxy.config.ssl.client.CA.cert.path
proxy.config.ssl.client.cert.filename
proxy.config.ssl.client.certification_level
proxy.config.ssl.client.cert.path
proxy.config.ssl.client.private_key.filename
proxy.config.ssl.client.private_key.path
proxy.config.ssl.client.verify.server
proxy.config.ssl.enabled
proxy.config.ssl.number.threads
proxy.config.ssl.server.cert_chain.filename
proxy.config.ssl.server.cert.path
proxy.config.ssl.server.cipher_suite
proxy.config.ssl.server.honor_cipher_order
proxy.config.ssl.SSLv2
proxy.config.ssl.SSLv3
proxy.config.ssl.TLSv1
proxy.config.ssl.compression
proxy.config.ssl.server.multicert.filename
proxy.config.ssl.server_port
proxy.config.ssl.server.private_key.path
proxy.config.stack_dump_enabled
proxy.config.stat_collector.interval
proxy.config.stat_collector.port
proxy.config.stats.config_file
proxy.config.stats.snap_file
proxy.config.stats.snap_frequency
proxy.config.syslog_facility
proxy.config.system.mmap_max
proxy.config.system.file_max_pct
proxy.config.thread.default.stacksize
proxy.config.udp.free_cancelled_pkts_sec
proxy.config.udp.periodic_cleanup
proxy.config.udp.send_retries
proxy.config.update.concurrent_updates
proxy.config.update.enabled
proxy.config.update.force
proxy.config.update.max_update_state_machines
proxy.config.update.memory_use_mb
proxy.config.update.retry_count
proxy.config.update.retry_interval
proxy.config.update.update_configuration
proxy.config.url_remap.default_to_server_pac
proxy.config.url_remap.default_to_server_pac_port
proxy.config.url_remap.filename
proxy.config.url_remap.pristine_host_hdr
proxy.config.url_remap.remap_required
proxy.config.user_name
proxy.config.vmap.addr_file
proxy.config.vmap.down_up_timeout
proxy.config.vmap.enabled
=head1 LICENSE
Simple Apache Traffic Server client object, to communicate with the local manager.
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
#-=-=-=-=-=-=-=-= No more POD for you =-=-=-=-=-=-=-=-
| 34.998696 | 136 | 0.78498 |
eddaa1073025ed5170f32fb100f4c3340a483ed2 | 33,905 | pm | Perl | fatlib/CPAN/Meta/Validator.pm | autarch/cpanminus | 9d7490d221fa5760375d02c5fc42b16a296dc358 | [
"Artistic-1.0"
] | 9 | 2018-04-19T05:08:30.000Z | 2021-11-23T07:36:58.000Z | fatlib/CPAN/Meta/Validator.pm | autarch/cpanminus | 9d7490d221fa5760375d02c5fc42b16a296dc358 | [
"Artistic-1.0"
] | 98 | 2017-11-02T19:00:44.000Z | 2022-03-22T16:15:39.000Z | fatlib/CPAN/Meta/Validator.pm | autarch/cpanminus | 9d7490d221fa5760375d02c5fc42b16a296dc358 | [
"Artistic-1.0"
] | 9 | 2017-10-24T21:53:36.000Z | 2021-11-23T07:36:59.000Z | use 5.006;
use strict;
use warnings;
package CPAN::Meta::Validator;
our $VERSION = '2.150005';
#pod =head1 SYNOPSIS
#pod
#pod my $struct = decode_json_file('META.json');
#pod
#pod my $cmv = CPAN::Meta::Validator->new( $struct );
#pod
#pod unless ( $cmv->is_valid ) {
#pod my $msg = "Invalid META structure. Errors found:\n";
#pod $msg .= join( "\n", $cmv->errors );
#pod die $msg;
#pod }
#pod
#pod =head1 DESCRIPTION
#pod
#pod This module validates a CPAN Meta structure against the version of the
#pod specification claimed in the C<meta-spec> field of the structure.
#pod
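#pod For example, a skeletal version 2 structure along these lines (all field
#pod values are just placeholders) would be checked against the version 2
#pod definition:
#pod
#pod   my $struct = {
#pod     abstract       => 'An example distribution',
#pod     author         => [ 'A. U. Thor' ],
#pod     dynamic_config => 0,
#pod     generated_by   => 'by hand',
#pod     license        => [ 'perl_5' ],
#pod     'meta-spec'    => { version => '2' },
#pod     name           => 'Example-Dist',
#pod     release_status => 'stable',
#pod     version        => '0.001',
#pod   };
#pod
#pod   my $cmv = CPAN::Meta::Validator->new( $struct );
#pod   warn join( "\n", $cmv->errors ) unless $cmv->is_valid;
#pod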
#pod =cut
#--------------------------------------------------------------------------#
# This code copied and adapted from Test::CPAN::Meta
# by Barbie, <[email protected]> for Miss Barbell Productions,
# L<http://www.missbarbell.co.uk>
#--------------------------------------------------------------------------#
#--------------------------------------------------------------------------#
# Specification Definitions
#--------------------------------------------------------------------------#
my %known_specs = (
'1.4' => 'http://module-build.sourceforge.net/META-spec-v1.4.html',
'1.3' => 'http://module-build.sourceforge.net/META-spec-v1.3.html',
'1.2' => 'http://module-build.sourceforge.net/META-spec-v1.2.html',
'1.1' => 'http://module-build.sourceforge.net/META-spec-v1.1.html',
'1.0' => 'http://module-build.sourceforge.net/META-spec-v1.0.html'
);
my %known_urls = map {$known_specs{$_} => $_} keys %known_specs;
my $module_map1 = { 'map' => { ':key' => { name => \&module, value => \&exversion } } };
my $module_map2 = { 'map' => { ':key' => { name => \&module, value => \&version } } };
my $no_index_2 = {
'map' => { file => { list => { value => \&string } },
directory => { list => { value => \&string } },
'package' => { list => { value => \&string } },
namespace => { list => { value => \&string } },
':key' => { name => \&custom_2, value => \&anything },
}
};
my $no_index_1_3 = {
'map' => { file => { list => { value => \&string } },
directory => { list => { value => \&string } },
'package' => { list => { value => \&string } },
namespace => { list => { value => \&string } },
':key' => { name => \&string, value => \&anything },
}
};
my $no_index_1_2 = {
'map' => { file => { list => { value => \&string } },
dir => { list => { value => \&string } },
'package' => { list => { value => \&string } },
namespace => { list => { value => \&string } },
':key' => { name => \&string, value => \&anything },
}
};
my $no_index_1_1 = {
'map' => { ':key' => { name => \&string, list => { value => \&string } },
}
};
my $prereq_map = {
map => {
':key' => {
name => \&phase,
'map' => {
':key' => {
name => \&relation,
%$module_map1,
},
},
}
},
};
my %definitions = (
'2' => {
# REQUIRED
'abstract' => { mandatory => 1, value => \&string },
'author' => { mandatory => 1, list => { value => \&string } },
'dynamic_config' => { mandatory => 1, value => \&boolean },
'generated_by' => { mandatory => 1, value => \&string },
'license' => { mandatory => 1, list => { value => \&license } },
'meta-spec' => {
mandatory => 1,
'map' => {
version => { mandatory => 1, value => \&version},
url => { value => \&url },
':key' => { name => \&custom_2, value => \&anything },
}
},
'name' => { mandatory => 1, value => \&string },
'release_status' => { mandatory => 1, value => \&release_status },
'version' => { mandatory => 1, value => \&version },
# OPTIONAL
'description' => { value => \&string },
'keywords' => { list => { value => \&string } },
'no_index' => $no_index_2,
'optional_features' => {
'map' => {
':key' => {
name => \&string,
'map' => {
description => { value => \&string },
prereqs => $prereq_map,
':key' => { name => \&custom_2, value => \&anything },
}
}
}
},
'prereqs' => $prereq_map,
'provides' => {
'map' => {
':key' => {
name => \&module,
'map' => {
file => { mandatory => 1, value => \&file },
version => { value => \&version },
':key' => { name => \&custom_2, value => \&anything },
}
}
}
},
'resources' => {
'map' => {
license => { list => { value => \&url } },
homepage => { value => \&url },
bugtracker => {
'map' => {
web => { value => \&url },
mailto => { value => \&string},
':key' => { name => \&custom_2, value => \&anything },
}
},
repository => {
'map' => {
web => { value => \&url },
url => { value => \&url },
type => { value => \&string },
':key' => { name => \&custom_2, value => \&anything },
}
},
':key' => { value => \&string, name => \&custom_2 },
}
},
# CUSTOM -- additional user defined key/value pairs
# note we can only validate the key name, as the structure is user defined
':key' => { name => \&custom_2, value => \&anything },
},
'1.4' => {
'meta-spec' => {
mandatory => 1,
'map' => {
version => { mandatory => 1, value => \&version},
url => { mandatory => 1, value => \&urlspec },
':key' => { name => \&string, value => \&anything },
},
},
'name' => { mandatory => 1, value => \&string },
'version' => { mandatory => 1, value => \&version },
'abstract' => { mandatory => 1, value => \&string },
'author' => { mandatory => 1, list => { value => \&string } },
'license' => { mandatory => 1, value => \&license },
'generated_by' => { mandatory => 1, value => \&string },
'distribution_type' => { value => \&string },
'dynamic_config' => { value => \&boolean },
'requires' => $module_map1,
'recommends' => $module_map1,
'build_requires' => $module_map1,
'configure_requires' => $module_map1,
'conflicts' => $module_map2,
'optional_features' => {
'map' => {
':key' => { name => \&string,
'map' => { description => { value => \&string },
requires => $module_map1,
recommends => $module_map1,
build_requires => $module_map1,
conflicts => $module_map2,
':key' => { name => \&string, value => \&anything },
}
}
}
},
'provides' => {
'map' => {
':key' => { name => \&module,
'map' => {
file => { mandatory => 1, value => \&file },
version => { value => \&version },
':key' => { name => \&string, value => \&anything },
}
}
}
},
'no_index' => $no_index_1_3,
'private' => $no_index_1_3,
'keywords' => { list => { value => \&string } },
'resources' => {
'map' => { license => { value => \&url },
homepage => { value => \&url },
bugtracker => { value => \&url },
repository => { value => \&url },
':key' => { value => \&string, name => \&custom_1 },
}
},
# additional user defined key/value pairs
# note we can only validate the key name, as the structure is user defined
':key' => { name => \&string, value => \&anything },
},
'1.3' => {
'meta-spec' => {
mandatory => 1,
'map' => {
version => { mandatory => 1, value => \&version},
url => { mandatory => 1, value => \&urlspec },
':key' => { name => \&string, value => \&anything },
},
},
'name' => { mandatory => 1, value => \&string },
'version' => { mandatory => 1, value => \&version },
'abstract' => { mandatory => 1, value => \&string },
'author' => { mandatory => 1, list => { value => \&string } },
'license' => { mandatory => 1, value => \&license },
'generated_by' => { mandatory => 1, value => \&string },
'distribution_type' => { value => \&string },
'dynamic_config' => { value => \&boolean },
'requires' => $module_map1,
'recommends' => $module_map1,
'build_requires' => $module_map1,
'conflicts' => $module_map2,
'optional_features' => {
'map' => {
':key' => { name => \&string,
'map' => { description => { value => \&string },
requires => $module_map1,
recommends => $module_map1,
build_requires => $module_map1,
conflicts => $module_map2,
':key' => { name => \&string, value => \&anything },
}
}
}
},
'provides' => {
'map' => {
':key' => { name => \&module,
'map' => {
file => { mandatory => 1, value => \&file },
version => { value => \&version },
':key' => { name => \&string, value => \&anything },
}
}
}
},
'no_index' => $no_index_1_3,
'private' => $no_index_1_3,
'keywords' => { list => { value => \&string } },
'resources' => {
'map' => { license => { value => \&url },
homepage => { value => \&url },
bugtracker => { value => \&url },
repository => { value => \&url },
':key' => { value => \&string, name => \&custom_1 },
}
},
# additional user defined key/value pairs
# note we can only validate the key name, as the structure is user defined
':key' => { name => \&string, value => \&anything },
},
# v1.2 is misleading: it seems to assume that a number of fields were created
# within v1.1, when they were actually created within v1.2. This may have been an
# original mistake, with v1.1 retrofitted into the timeline when
# v1.2 was originally slated as v1.1. But I could be wrong ;)
'1.2' => {
'meta-spec' => {
mandatory => 1,
'map' => {
version => { mandatory => 1, value => \&version},
url => { mandatory => 1, value => \&urlspec },
':key' => { name => \&string, value => \&anything },
},
},
'name' => { mandatory => 1, value => \&string },
'version' => { mandatory => 1, value => \&version },
'license' => { mandatory => 1, value => \&license },
'generated_by' => { mandatory => 1, value => \&string },
'author' => { mandatory => 1, list => { value => \&string } },
'abstract' => { mandatory => 1, value => \&string },
'distribution_type' => { value => \&string },
'dynamic_config' => { value => \&boolean },
'keywords' => { list => { value => \&string } },
'private' => $no_index_1_2,
'no_index' => $no_index_1_2,
'requires' => $module_map1,
'recommends' => $module_map1,
'build_requires' => $module_map1,
'conflicts' => $module_map2,
'optional_features' => {
'map' => {
':key' => { name => \&string,
'map' => { description => { value => \&string },
requires => $module_map1,
recommends => $module_map1,
build_requires => $module_map1,
conflicts => $module_map2,
':key' => { name => \&string, value => \&anything },
}
}
}
},
'provides' => {
'map' => {
':key' => { name => \&module,
'map' => {
file => { mandatory => 1, value => \&file },
version => { value => \&version },
':key' => { name => \&string, value => \&anything },
}
}
}
},
'resources' => {
'map' => { license => { value => \&url },
homepage => { value => \&url },
bugtracker => { value => \&url },
repository => { value => \&url },
':key' => { value => \&string, name => \&custom_1 },
}
},
# additional user defined key/value pairs
# note we can only validate the key name, as the structure is user defined
':key' => { name => \&string, value => \&anything },
},
# note that the 1.1 spec only specifies 'version' as mandatory
'1.1' => {
'name' => { value => \&string },
'version' => { mandatory => 1, value => \&version },
'license' => { value => \&license },
'generated_by' => { value => \&string },
'license_uri' => { value => \&url },
'distribution_type' => { value => \&string },
'dynamic_config' => { value => \&boolean },
'private' => $no_index_1_1,
'requires' => $module_map1,
'recommends' => $module_map1,
'build_requires' => $module_map1,
'conflicts' => $module_map2,
# additional user defined key/value pairs
# note we can only validate the key name, as the structure is user defined
':key' => { name => \&string, value => \&anything },
},
# note that the 1.0 spec doesn't specify optional or mandatory fields
# but we will treat version as mandatory since otherwise META 1.0 is
# completely arbitrary and pointless
'1.0' => {
'name' => { value => \&string },
'version' => { mandatory => 1, value => \&version },
'license' => { value => \&license },
'generated_by' => { value => \&string },
'license_uri' => { value => \&url },
'distribution_type' => { value => \&string },
'dynamic_config' => { value => \&boolean },
'requires' => $module_map1,
'recommends' => $module_map1,
'build_requires' => $module_map1,
'conflicts' => $module_map2,
# additional user defined key/value pairs
# note we can only validate the key name, as the structure is user defined
':key' => { name => \&string, value => \&anything },
},
);
#--------------------------------------------------------------------------#
# Code
#--------------------------------------------------------------------------#
#pod =method new
#pod
#pod my $cmv = CPAN::Meta::Validator->new( $struct )
#pod
#pod The constructor must be passed a metadata structure.
#pod
#pod =cut
sub new {
my ($class,$data) = @_;
# create an attributes hash
my $self = {
'data' => $data,
'spec' => eval { $data->{'meta-spec'}{'version'} } || "1.0",
'errors' => undef,
};
# create the object
return bless $self, $class;
}
#pod =method is_valid
#pod
#pod if ( $cmv->is_valid ) {
#pod ...
#pod }
#pod
#pod Returns a boolean value indicating whether the metadata provided
#pod is valid.
#pod
#pod =cut
sub is_valid {
my $self = shift;
my $data = $self->{data};
my $spec_version = $self->{spec};
$self->check_map($definitions{$spec_version},$data);
return ! $self->errors;
}
#pod =method errors
#pod
#pod warn( join "\n", $cmv->errors );
#pod
#pod Returns a list of errors seen during validation.
#pod
#pod =cut
sub errors {
my $self = shift;
return () unless(defined $self->{errors});
return @{$self->{errors}};
}
#pod =begin :internals
#pod
#pod =head2 Check Methods
#pod
#pod =over
#pod
#pod =item *
#pod
#pod check_map($spec,$data)
#pod
#pod Checks whether a map (or hash) part of the data structure conforms to the
#pod appropriate specification definition.
#pod
#pod =item *
#pod
#pod check_list($spec,$data)
#pod
#pod Checks whether a list (or array) part of the data structure conforms to
#pod the appropriate specification definition.
#pod
#pod =item *
#pod
#pod =back
#pod
#pod =cut
my $spec_error = "Missing validation action in specification. "
. "Must be one of 'map', 'list', or 'value'";
sub check_map {
my ($self,$spec,$data) = @_;
if(ref($spec) ne 'HASH') {
$self->_error( "Unknown META specification, cannot validate." );
return;
}
if(ref($data) ne 'HASH') {
$self->_error( "Expected a map structure from string or file." );
return;
}
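# First pass: flag any mandatory keys that are missing from the data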
for my $key (keys %$spec) {
next unless($spec->{$key}->{mandatory});
next if(defined $data->{$key});
push @{$self->{stack}}, $key;
$self->_error( "Missing mandatory field, '$key'" );
pop @{$self->{stack}};
}
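# Second pass: validate every key present in the data against the spec,
# falling back to the ':key' wildcard entry for user-defined fields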
for my $key (keys %$data) {
push @{$self->{stack}}, $key;
if($spec->{$key}) {
if($spec->{$key}{value}) {
$spec->{$key}{value}->($self,$key,$data->{$key});
} elsif($spec->{$key}{'map'}) {
$self->check_map($spec->{$key}{'map'},$data->{$key});
} elsif($spec->{$key}{'list'}) {
$self->check_list($spec->{$key}{'list'},$data->{$key});
} else {
$self->_error( "$spec_error for '$key'" );
}
} elsif ($spec->{':key'}) {
$spec->{':key'}{name}->($self,$key,$key);
if($spec->{':key'}{value}) {
$spec->{':key'}{value}->($self,$key,$data->{$key});
} elsif($spec->{':key'}{'map'}) {
$self->check_map($spec->{':key'}{'map'},$data->{$key});
} elsif($spec->{':key'}{'list'}) {
$self->check_list($spec->{':key'}{'list'},$data->{$key});
} else {
$self->_error( "$spec_error for ':key'" );
}
} else {
$self->_error( "Unknown key, '$key', found in map structure" );
}
pop @{$self->{stack}};
}
}
sub check_list {
my ($self,$spec,$data) = @_;
if(ref($data) ne 'ARRAY') {
$self->_error( "Expected a list structure" );
return;
}
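# An empty list is an error when the spec marks this list as mandatory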
if(defined $spec->{mandatory}) {
if(!defined $data->[0]) {
$self->_error( "Missing entries from mandatory list" );
}
}
for my $value (@$data) {
push @{$self->{stack}}, $value || "<undef>";
if(defined $spec->{value}) {
$spec->{value}->($self,'list',$value);
} elsif(defined $spec->{'map'}) {
$self->check_map($spec->{'map'},$value);
} elsif(defined $spec->{'list'}) {
$self->check_list($spec->{'list'},$value);
} elsif ($spec->{':key'}) {
$self->check_map($spec,$value);
} else {
$self->_error( "$spec_error associated with '$self->{stack}[-2]'" );
}
pop @{$self->{stack}};
}
}
#pod =head2 Validator Methods
#pod
#pod =over
#pod
#pod =item *
#pod
#pod header($self,$key,$value)
#pod
#pod Validates that the header is valid.
#pod
#pod Note: No longer used as we now read the data structure, not the file.
#pod
#pod =item *
#pod
#pod url($self,$key,$value)
#pod
#pod Validates that a given value is in an acceptable URL format
#pod
#pod =item *
#pod
#pod urlspec($self,$key,$value)
#pod
#pod Validates that the URL to a META specification is a known one.
#pod
#pod =item *
#pod
#pod string_or_undef($self,$key,$value)
#pod
#pod Validates that the value is either a string or an undef value. Bit of a
#pod catchall function for parts of the data structure that are completely user
#pod defined.
#pod
#pod =item *
#pod
#pod string($self,$key,$value)
#pod
#pod Validates that a string exists for the given key.
#pod
#pod =item *
#pod
#pod file($self,$key,$value)
#pod
#pod Validate that a file is passed for the given key. This may be made more
#pod thorough in the future. For now it acts like \&string.
#pod
#pod =item *
#pod
#pod exversion($self,$key,$value)
#pod
#pod Validates a list of versions, e.g. '<= 5, >=2, ==3, !=4, >1, <6, 0'.
#pod
#pod =item *
#pod
#pod version($self,$key,$value)
#pod
#pod Validates a single version string. Versions of the type '5.8.8' and '0.00_00'
#pod are both valid. A leading 'v' like 'v1.2.3' is also valid.
#pod
#pod =item *
#pod
#pod boolean($self,$key,$value)
#pod
#pod Validates for a boolean value. Currently these values are '1', '0', 'true',
#pod 'false', however the latter 2 may be removed.
#pod
#pod =item *
#pod
#pod license($self,$key,$value)
#pod
#pod Validates that a value is given for the license. Returns 1 if it is a known
#pod license type, or 2 if a value is given but the license type is not a
#pod recommended one.
#pod
#pod =item *
#pod
#pod custom_1($self,$key,$value)
#pod
#pod Validates that the given key is in CamelCase, to indicate a user defined
#pod keyword and only has characters in the class [-_a-zA-Z]. In version 1.X
#pod of the spec, this was only explicitly stated for 'resources'.
#pod
#pod =item *
#pod
#pod custom_2($self,$key,$value)
#pod
#pod Validates that the given key begins with 'x_' or 'X_', to indicate a user
#pod defined keyword and only has characters in the class [-_a-zA-Z]
#pod
#pod =item *
#pod
#pod identifier($self,$key,$value)
#pod
#pod Validates that key is in an acceptable format for the META specification,
#pod for an identifier, i.e. any that matches the regular expression
#pod qr/[a-z][a-z_]/i.
#pod
#pod =item *
#pod
#pod module($self,$key,$value)
#pod
#pod Validates that a given key is in an acceptable module name format, e.g.
#pod 'Test::CPAN::Meta::Version'.
#pod
#pod =back
#pod
#pod =end :internals
#pod
#pod =cut
sub header {
my ($self,$key,$value) = @_;
if(defined $value) {
return 1 if($value && $value =~ /^--- #YAML:1.0/);
}
$self->_error( "file does not have a valid YAML header." );
return 0;
}
sub release_status {
my ($self,$key,$value) = @_;
if(defined $value) {
my $version = $self->{data}{version} || '';
if ( $version =~ /_/ ) {
return 1 if ( $value =~ /\A(?:testing|unstable)\z/ );
$self->_error( "'$value' for '$key' is invalid for version '$version'" );
}
else {
return 1 if ( $value =~ /\A(?:stable|testing|unstable)\z/ );
$self->_error( "'$value' for '$key' is invalid" );
}
}
else {
$self->_error( "'$key' is not defined" );
}
return 0;
}
# _uri_split taken from URI::Split by Gisle Aas, Copyright 2003
sub _uri_split {
return $_[0] =~ m,(?:([^:/?#]+):)?(?://([^/?#]*))?([^?#]*)(?:\?([^#]*))?(?:#(.*))?,;
}
sub url {
my ($self,$key,$value) = @_;
if(defined $value) {
my ($scheme, $auth, $path, $query, $frag) = _uri_split($value);
unless ( defined $scheme && length $scheme ) {
$self->_error( "'$value' for '$key' does not have a URL scheme" );
return 0;
}
unless ( defined $auth && length $auth ) {
$self->_error( "'$value' for '$key' does not have a URL authority" );
return 0;
}
return 1;
}
$value ||= '';
$self->_error( "'$value' for '$key' is not a valid URL." );
return 0;
}
sub urlspec {
my ($self,$key,$value) = @_;
if(defined $value) {
return 1 if($value && $known_specs{$self->{spec}} eq $value);
if($value && $known_urls{$value}) {
$self->_error( 'META specification URL does not match version' );
return 0;
}
}
$self->_error( 'Unknown META specification' );
return 0;
}
sub anything { return 1 }
sub string {
my ($self,$key,$value) = @_;
if(defined $value) {
return 1 if($value || $value =~ /^0$/);
}
$self->_error( "value is an undefined string" );
return 0;
}
sub string_or_undef {
my ($self,$key,$value) = @_;
return 1 unless(defined $value);
return 1 if($value || $value =~ /^0$/);
$self->_error( "No string defined for '$key'" );
return 0;
}
sub file {
my ($self,$key,$value) = @_;
return 1 if(defined $value);
$self->_error( "No file defined for '$key'" );
return 0;
}
sub exversion {
my ($self,$key,$value) = @_;
if(defined $value && ($value || $value =~ /0/)) {
my $pass = 1;
for(split(",",$value)) { $self->version($key,$_) or ($pass = 0); }
return $pass;
}
$value = '<undef>' unless(defined $value);
$self->_error( "'$value' for '$key' is not a valid version." );
return 0;
}
sub version {
my ($self,$key,$value) = @_;
if(defined $value) {
return 0 unless($value || $value =~ /0/);
return 1 if($value =~ /^\s*((<|<=|>=|>|!=|==)\s*)?v?\d+((\.\d+((_|\.)\d+)?)?)/);
} else {
$value = '<undef>';
}
$self->_error( "'$value' for '$key' is not a valid version." );
return 0;
}
sub boolean {
my ($self,$key,$value) = @_;
if(defined $value) {
return 1 if($value =~ /^(0|1|true|false)$/);
} else {
$value = '<undef>';
}
$self->_error( "'$value' for '$key' is not a boolean value." );
return 0;
}
my %v1_licenses = (
'perl' => 'http://dev.perl.org/licenses/',
'gpl' => 'http://www.opensource.org/licenses/gpl-license.php',
'apache' => 'http://apache.org/licenses/LICENSE-2.0',
'artistic' => 'http://opensource.org/licenses/artistic-license.php',
'artistic_2' => 'http://opensource.org/licenses/artistic-license-2.0.php',
'lgpl' => 'http://www.opensource.org/licenses/lgpl-license.php',
'bsd' => 'http://www.opensource.org/licenses/bsd-license.php',
'gpl' => 'http://www.opensource.org/licenses/gpl-license.php',
'mit' => 'http://opensource.org/licenses/mit-license.php',
'mozilla' => 'http://opensource.org/licenses/mozilla1.1.php',
'open_source' => undef,
'unrestricted' => undef,
'restrictive' => undef,
'unknown' => undef,
);
my %v2_licenses = map { $_ => 1 } qw(
agpl_3
apache_1_1
apache_2_0
artistic_1
artistic_2
bsd
freebsd
gfdl_1_2
gfdl_1_3
gpl_1
gpl_2
gpl_3
lgpl_2_1
lgpl_3_0
mit
mozilla_1_0
mozilla_1_1
openssl
perl_5
qpl_1_0
ssleay
sun
zlib
open_source
restricted
unrestricted
unknown
);
sub license {
my ($self,$key,$value) = @_;
my $licenses = $self->{spec} < 2 ? \%v1_licenses : \%v2_licenses;
if(defined $value) {
return 1 if($value && exists $licenses->{$value});
} else {
$value = '<undef>';
}
$self->_error( "License '$value' is invalid" );
return 0;
}
sub custom_1 {
my ($self,$key) = @_;
if(defined $key) {
# a valid user defined key should be alphabetic
# and contain at least one capital case letter.
return 1 if($key && $key =~ /^[_a-z]+$/i && $key =~ /[A-Z]/);
} else {
$key = '<undef>';
}
$self->_error( "Custom resource '$key' must be in CamelCase." );
return 0;
}
sub custom_2 {
my ($self,$key) = @_;
if(defined $key) {
return 1 if($key && $key =~ /^x_/i); # user defined
} else {
$key = '<undef>';
}
$self->_error( "Custom key '$key' must begin with 'x_' or 'X_'." );
return 0;
}
sub identifier {
my ($self,$key) = @_;
if(defined $key) {
return 1 if($key && $key =~ /^([a-z][_a-z]+)$/i); # spec 2.0 defined
} else {
$key = '<undef>';
}
$self->_error( "Key '$key' is not a legal identifier." );
return 0;
}
sub module {
my ($self,$key) = @_;
if(defined $key) {
return 1 if($key && $key =~ /^[A-Za-z0-9_]+(::[A-Za-z0-9_]+)*$/);
} else {
$key = '<undef>';
}
$self->_error( "Key '$key' is not a legal module name." );
return 0;
}
my @valid_phases = qw/ configure build test runtime develop /;
sub phase {
my ($self,$key) = @_;
if(defined $key) {
return 1 if( length $key && grep { $key eq $_ } @valid_phases );
return 1 if $key =~ /x_/i;
} else {
$key = '<undef>';
}
$self->_error( "Key '$key' is not a legal phase." );
return 0;
}
my @valid_relations = qw/ requires recommends suggests conflicts /;
sub relation {
my ($self,$key) = @_;
if(defined $key) {
return 1 if( length $key && grep { $key eq $_ } @valid_relations );
return 1 if $key =~ /x_/i;
} else {
$key = '<undef>';
}
$self->_error( "Key '$key' is not a legal prereq relationship." );
return 0;
}
sub _error {
my $self = shift;
my $mess = shift;
$mess .= ' ('.join(' -> ',@{$self->{stack}}).')' if($self->{stack});
$mess .= " [Validation: $self->{spec}]";
push @{$self->{errors}}, $mess;
}
1;
# ABSTRACT: validate CPAN distribution metadata structures
=pod
=encoding UTF-8
=head1 NAME
CPAN::Meta::Validator - validate CPAN distribution metadata structures
=head1 VERSION
version 2.150005
=head1 SYNOPSIS
my $struct = decode_json_file('META.json');
my $cmv = CPAN::Meta::Validator->new( $struct );
unless ( $cmv->is_valid ) {
my $msg = "Invalid META structure. Errors found:\n";
$msg .= join( "\n", $cmv->errors );
die $msg;
}
=head1 DESCRIPTION
This module validates a CPAN Meta structure against the version of the
specification claimed in the C<meta-spec> field of the structure.
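If the structure has no C<meta-spec> entry, it is checked against the 1.0
rules, where only C<version> is mandatory. As a small illustration (the field
values below are made up), an in-memory structure can be validated directly:
    my $struct = {
        name    => 'Foo-Bar',
        version => '0.01',
        license => 'perl',
    };
    my $cmv = CPAN::Meta::Validator->new( $struct );
    warn join( "\n", $cmv->errors ) unless $cmv->is_valid;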
=head1 METHODS
=head2 new
my $cmv = CPAN::Meta::Validator->new( $struct )
The constructor must be passed a metadata structure.
=head2 is_valid
if ( $cmv->is_valid ) {
...
}
Returns a boolean value indicating whether the metadata provided
is valid.
=head2 errors
warn( join "\n", $cmv->errors );
Returns a list of errors seen during validation.
=begin :internals
=head2 Check Methods
=over
=item *
check_map($spec,$data)
Checks whether a map (or hash) part of the data structure conforms to the
appropriate specification definition.
=item *
check_list($spec,$data)
Checks whether a list (or array) part of the data structure conforms to
the appropriate specification definition.
=item *
=back
=head2 Validator Methods
=over
=item *
header($self,$key,$value)
Validates that the header is valid.
Note: No longer used as we now read the data structure, not the file.
=item *
url($self,$key,$value)
Validates that a given value is in an acceptable URL format
=item *
urlspec($self,$key,$value)
Validates that the URL to a META specification is a known one.
=item *
string_or_undef($self,$key,$value)
Validates that the value is either a string or an undef value. Bit of a
catchall function for parts of the data structure that are completely user
defined.
=item *
string($self,$key,$value)
Validates that a string exists for the given key.
=item *
file($self,$key,$value)
Validate that a file is passed for the given key. This may be made more
thorough in the future. For now it acts like \&string.
=item *
exversion($self,$key,$value)
Validates a list of versions, e.g. '<= 5, >=2, ==3, !=4, >1, <6, 0'.
=item *
version($self,$key,$value)
Validates a single version string. Versions of the type '5.8.8' and '0.00_00'
are both valid. A leading 'v' like 'v1.2.3' is also valid.
=item *
boolean($self,$key,$value)
Validates for a boolean value. Currently these values are '1', '0', 'true',
'false', however the latter 2 may be removed.
=item *
license($self,$key,$value)
Validates that a value is given for the license. Returns 1 if it is a known
license type, or 2 if a value is given but the license type is not a
recommended one.
=item *
custom_1($self,$key,$value)
Validates that the given key is in CamelCase, to indicate a user defined
keyword and only has characters in the class [-_a-zA-Z]. In version 1.X
of the spec, this was only explicitly stated for 'resources'.
=item *
custom_2($self,$key,$value)
Validates that the given key begins with 'x_' or 'X_', to indicate a user
defined keyword and only has characters in the class [-_a-zA-Z]
=item *
identifier($self,$key,$value)
Validates that key is in an acceptable format for the META specification,
for an identifier, i.e. any that matches the regular expression
qr/[a-z][a-z_]/i.
=item *
module($self,$key,$value)
Validates that a given key is in an acceptable module name format, e.g.
'Test::CPAN::Meta::Version'.
=back
=end :internals
=for Pod::Coverage anything boolean check_list custom_1 custom_2 exversion file
identifier license module phase relation release_status string string_or_undef
url urlspec version header check_map
=head1 BUGS
Please report any bugs or feature requests using the CPAN Request Tracker.
Bugs can be submitted through the web interface at
L<http://rt.cpan.org/Dist/Display.html?Queue=CPAN-Meta>
When submitting a bug or request, please include a test-file or a patch to an
existing test-file that illustrates the bug or desired feature.
=head1 AUTHORS
=over 4
=item *
David Golden <[email protected]>
=item *
Ricardo Signes <[email protected]>
=back
=head1 COPYRIGHT AND LICENSE
This software is copyright (c) 2010 by David Golden and Ricardo Signes.
This is free software; you can redistribute it and/or modify it under
the same terms as the Perl 5 programming language system itself.
=cut
__END__
# vim: ts=2 sts=2 sw=2 et :
| 27.997523 | 91 | 0.518921 |
edd40bc7239a801f2ebd1495d039fc90e8932592 | 123 | pl | Perl | Chapter04/plus-for-str.pl | PacktPublishing/Perl-6-Deep-Dive | b47fadd6bd65efd38ed4860109edc5018ce98924 | [
"MIT"
] | 9 | 2017-12-28T13:41:36.000Z | 2021-12-20T03:31:06.000Z | Chapter04/plus-for-str.pl | PacktPublishing/Perl-6-Deep-Dive | b47fadd6bd65efd38ed4860109edc5018ce98924 | [
"MIT"
] | 1 | 2020-01-29T07:23:03.000Z | 2020-12-01T07:38:06.000Z | Chapter04/plus-for-str.pl | PacktPublishing/Perl-6-Deep-Dive | b47fadd6bd65efd38ed4860109edc5018ce98924 | [
"MIT"
] | 2 | 2017-12-13T10:11:15.000Z | 2019-05-24T00:38:23.000Z | multi sub infix:<+>(Int $a, Str $b) {
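# ~$a stringifies the Int so both operands are joined with the ~ concatenation operator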
~$a ~ $b
}
multi sub infix:<+>(Str $a, Str $b) {
$a ~ $b;
}
say "4" + "9";
| 12.3 | 37 | 0.414634 |
ed649d2307888e662bdc50adf3b927362645f272 | 2,200 | pm | Perl | projects/CRM/LedgerSMB-master/LedgerSMB/Report/PNL/Invoice.pm | tridentcodesolution/jpankleshwaria.github.io | ba4774c4c473238ed277537efdfd7bb6b24d0fd9 | [
"Apache-2.0"
] | null | null | null | projects/CRM/LedgerSMB-master/LedgerSMB/Report/PNL/Invoice.pm | tridentcodesolution/jpankleshwaria.github.io | ba4774c4c473238ed277537efdfd7bb6b24d0fd9 | [
"Apache-2.0"
] | null | null | null | projects/CRM/LedgerSMB-master/LedgerSMB/Report/PNL/Invoice.pm | tridentcodesolution/jpankleshwaria.github.io | ba4774c4c473238ed277537efdfd7bb6b24d0fd9 | [
"Apache-2.0"
] | null | null | null | =head1 NAME
LedgerSMB::Report::PNL::Invoice - Provides an Income Statement-like report on
invoices
=head1 SYNPOSIS
my $rpt = LedgerSMB::Report::PNL::Invoice->new(%$request);
$rpt->render($request);
=head1 DESCRIPTION
This provides the income statement-like report for invoices in LedgerSMB
1.4 and later. It is designed to let a business examine the profit margins
of specific invoices.
=cut
package LedgerSMB::Report::PNL::Invoice;
use Moose;
extends 'LedgerSMB::Report::PNL';
=head1 CRITERIA PROPERTIES
=over
=item id
This is the id of the invoice
=cut
has id => (is => 'ro', isa =>'Int', required => 1);
=item invnumber
Invoice number
=cut
has invnumber => (is => 'rw', isa =>'Str');
=item transdate
Transaction Date
=cut
has transdate => (is => 'rw', isa =>'LedgerSMB::Moose::Date', coerce=> 1);
=item name
Customer/vendor name
=cut
has name => (is => 'rw', isa =>'Str');
=back
=head1 CONSTANT REPORT-RELATED FUNCTIONS
=over
=item template
=cut
sub template { return 'Reports/PNL' }
=item name
=cut
sub name { my ($self) = @_; return $self->Text('Invoice Profit/Loss') }
=item header_lines
=cut
sub header_lines {
my ($self) = @_;
return [{name => 'name',
text => $self->Text('Name') },
{name => 'invnumber',
text => $self->Text('Invoice Number') },
{name => 'transdate',
text => $self->Text('Transaction Date') },
];
}
=back
=head1 METHODS
=cut
# private method
# report_base($from, $to)
# returns an array of hashrefs of report results. Used in adding comparison
# as well as the main report
sub report_base {
my ($self) = @_;
return $self->call_dbmethod(funcname => 'pnl__invoice');
}
=head1 SEE ALSO
=over
=item LedgerSMB::DBObject
=item LedgerSMB::DBObject::Moose
=item LedgerSMB::MooseTypes
=item LedgerSMB::Report
=item LedgerSMB::Report::Dates
=item LedgerSMB::Report::PNL
=back
=head1 COPYRIGHT
COPYRIGHT (C) 2012 The LedgerSMB Core Team. This file may be re-used under the
terms of the LedgerSMB General Public License version 2 or at your option any
later version. Please see enclosed LICENSE file for details.
=cut
1;
| 16.296296 | 79 | 0.672727 |
edc47c2253db62296e5548aa25388b0e3597bbda | 1,448 | pm | Perl | auto-lib/Paws/CognitoIdp/NewDeviceMetadataType.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 164 | 2015-01-08T14:58:53.000Z | 2022-02-20T19:16:24.000Z | auto-lib/Paws/CognitoIdp/NewDeviceMetadataType.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 348 | 2015-01-07T22:08:38.000Z | 2022-01-27T14:34:44.000Z | auto-lib/Paws/CognitoIdp/NewDeviceMetadataType.pm | 0leksii/aws-sdk-perl | b2132fe3c79a06fd15b6137e8a0eb628de722e0f | [
"Apache-2.0"
] | 87 | 2015-04-22T06:29:47.000Z | 2021-09-29T14:45:55.000Z | # Generated by default/object.tt
package Paws::CognitoIdp::NewDeviceMetadataType;
use Moose;
has DeviceGroupKey => (is => 'ro', isa => 'Str');
has DeviceKey => (is => 'ro', isa => 'Str');
1;
### main pod documentation begin ###
=head1 NAME
Paws::CognitoIdp::NewDeviceMetadataType
=head1 USAGE
This class represents one of two things:
=head3 Arguments in a call to a service
Use the attributes of this class as arguments to methods. You shouldn't make instances of this class.
Each attribute should be used as a named argument in the calls that expect this type of object.
As an example, if Att1 is expected to be a Paws::CognitoIdp::NewDeviceMetadataType object:
$service_obj->Method(Att1 => { DeviceGroupKey => $value, ..., DeviceKey => $value });
=head3 Results returned from an API call
Use accessors for each attribute. If Att1 is expected to be an Paws::CognitoIdp::NewDeviceMetadataType object:
$result = $service_obj->Method(...);
$result->Att1->DeviceGroupKey
=head1 DESCRIPTION
The new device metadata type.
=head1 ATTRIBUTES
=head2 DeviceGroupKey => Str
The device group key.
=head2 DeviceKey => Str
The device key.
=head1 SEE ALSO
This class forms part of L<Paws>, describing an object used in L<Paws::CognitoIdp>
=head1 BUGS and CONTRIBUTIONS
The source code is located here: L<https://github.com/pplu/aws-sdk-perl>
Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>
=cut
| 22.276923 | 110 | 0.732735 |
edcbeebfd7532c7c3e5ee665dca23ff627a5b3a1 | 39,292 | pm | Perl | lib/Perl/Critic.pm | schwern/Perl-Critic | 55b2404328183a6487d5a3eceb67412c508f0eab | [
"Artistic-1.0"
] | 2 | 2016-08-27T09:29:20.000Z | 2019-04-19T23:05:50.000Z | lib/Perl/Critic.pm | schwern/Perl-Critic | 55b2404328183a6487d5a3eceb67412c508f0eab | [
"Artistic-1.0"
] | null | null | null | lib/Perl/Critic.pm | schwern/Perl-Critic | 55b2404328183a6487d5a3eceb67412c508f0eab | [
"Artistic-1.0"
] | null | null | null | ##############################################################################
# $URL$
# $Date$
# $Author$
# $Revision$
##############################################################################
package Perl::Critic;
use 5.006001;
use strict;
use warnings;
use English qw(-no_match_vars);
use Readonly;
use base qw(Exporter);
use File::Spec;
use List::MoreUtils qw< firstidx >;
use Scalar::Util qw< blessed >;
use Perl::Critic::Exception::Configuration::Generic;
use Perl::Critic::Config;
use Perl::Critic::Violation;
use Perl::Critic::Document;
use Perl::Critic::Statistics;
use Perl::Critic::Utils qw< :characters hashify shebang_line >;
#-----------------------------------------------------------------------------
our $VERSION = '1.117';
Readonly::Array our @EXPORT_OK => qw(critique);
#=============================================================================
# PUBLIC methods
sub new {
my ( $class, %args ) = @_;
my $self = bless {}, $class;
$self->{_config} = $args{-config} || Perl::Critic::Config->new( %args );
$self->{_stats} = Perl::Critic::Statistics->new();
return $self;
}
#-----------------------------------------------------------------------------
sub config {
my $self = shift;
return $self->{_config};
}
#-----------------------------------------------------------------------------
sub add_policy {
my ( $self, @args ) = @_;
#Delegate to Perl::Critic::Config
return $self->config()->add_policy( @args );
}
#-----------------------------------------------------------------------------
sub policies {
my $self = shift;
#Delegate to Perl::Critic::Config
return $self->config()->policies();
}
#-----------------------------------------------------------------------------
sub statistics {
my $self = shift;
return $self->{_stats};
}
#-----------------------------------------------------------------------------
sub critique { ## no critic (ArgUnpacking)
#-------------------------------------------------------------------
# This subroutine can be called as an object method or as a static
# function. In the latter case, the first argument can be a
# hashref of configuration parameters that shall be used to create
# an object behind the scenes. Note that this object does not
# persist. In other words, it is not a singleton. Here are some
# of the ways this subroutine might get called:
#
# #Object style...
# $critic->critique( $code );
#
# #Functional style...
# critique( $code );
# critique( {}, $code );
# critique( {-foo => bar}, $code );
#------------------------------------------------------------------
my ( $self, $source_code ) = @_ >= 2 ? @_ : ( {}, $_[0] );
$self = ref $self eq 'HASH' ? __PACKAGE__->new(%{ $self }) : $self;
return if not defined $source_code; # If no code, then nothing to do.
my $config = $self->config();
my $doc =
blessed($source_code) && $source_code->isa('Perl::Critic::Document')
? $source_code
: Perl::Critic::Document->new(
'-source' => $source_code,
'-program-extensions' => [$config->program_extensions_as_regexes()],
);
if ( 0 == $self->policies() ) {
Perl::Critic::Exception::Configuration::Generic->throw(
message => 'There are no enabled policies.',
)
}
return $self->_gather_violations($doc);
}
#=============================================================================
# PRIVATE methods
sub _gather_violations {
my ($self, $doc) = @_;
# Disable exempt code lines, if desired
if ( not $self->config->force() ) {
$doc->process_annotations();
}
# Evaluate each policy
my @policies = $self->config->policies();
my @ordered_policies = _futz_with_policy_order(@policies);
my @violations = map { _critique($_, $doc) } @ordered_policies;
# Accumulate statistics
$self->statistics->accumulate( $doc, \@violations );
# If requested, rank violations by their severity and return the top N.
if ( @violations && (my $top = $self->config->top()) ) {
my $limit = @violations < $top ? $#violations : $top-1;
@violations = Perl::Critic::Violation::sort_by_severity(@violations);
@violations = ( reverse @violations )[ 0 .. $limit ]; #Slicing...
}
# Always return violations sorted by location
return Perl::Critic::Violation->sort_by_location(@violations);
}
#=============================================================================
# PRIVATE functions
sub _critique {
my ($policy, $doc) = @_;
return if not $policy->prepare_to_scan_document($doc);
my $maximum_violations = $policy->get_maximum_violations_per_document();
return if defined $maximum_violations && $maximum_violations == 0;
my @violations = ();
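# Examine every element type this Policy applies to; the whole document is a special case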
TYPE:
for my $type ( $policy->applies_to() ) {
my @elements;
if ($type eq 'PPI::Document') {
@elements = ($doc);
}
else {
@elements = @{ $doc->find($type) || [] };
}
ELEMENT:
for my $element (@elements) {
# Evaluate the policy on this $element. A policy may
# return zero or more violations. We only want the
# violations that occur on lines that have not been
# disabled.
VIOLATION:
for my $violation ( $policy->violates( $element, $doc ) ) {
my $line = $violation->location()->[0];
if ( $doc->line_is_disabled_for_policy($line, $policy) ) {
$doc->add_suppressed_violation($violation);
next VIOLATION;
}
push @violations, $violation;
last TYPE if defined $maximum_violations and @violations >= $maximum_violations;
}
}
}
return @violations;
}
#-----------------------------------------------------------------------------
sub _futz_with_policy_order {
# The ProhibitUselessNoCritic policy is another special policy. It
# deals with the violations that *other* Policies produce. Therefore
# it needs to be run *after* all the other Policies. TODO: find
# a way for Policies to express an ordering preference somehow.
my @policy_objects = @_;
my $magical_policy_name = 'Perl::Critic::Policy::Miscellanea::ProhibitUselessNoCritic';
my $idx = firstidx {ref $_ eq $magical_policy_name} @policy_objects;
push @policy_objects, splice @policy_objects, $idx, 1;
return @policy_objects;
}
#-----------------------------------------------------------------------------
1;
__END__
=pod
=for stopwords DGR INI-style API -params pbp refactored ActivePerl ben Jore
Dolan's Twitter Alexandr Ciornii Ciornii's downloadable
=head1 NAME
Perl::Critic - Critique Perl source code for best-practices.
=head1 SYNOPSIS
use Perl::Critic;
my $file = shift;
my $critic = Perl::Critic->new();
my @violations = $critic->critique($file);
print @violations;
=head1 DESCRIPTION
Perl::Critic is an extensible framework for creating and applying
coding standards to Perl source code. Essentially, it is a static
source code analysis engine. Perl::Critic is distributed with a
number of L<Perl::Critic::Policy|Perl::Critic::Policy> modules that
attempt to enforce various coding guidelines. Most Policy modules are
based on Damian Conway's book B<Perl Best Practices>. However,
Perl::Critic is B<not> limited to PBP and will even support Policies
that contradict Conway. You can enable, disable, and customize those
Polices through the Perl::Critic interface. You can also create new
Policy modules that suit your own tastes.
For a command-line interface to Perl::Critic, see the documentation
for L<perlcritic|perlcritic>. If you want to integrate Perl::Critic
with your build process, L<Test::Perl::Critic|Test::Perl::Critic>
provides an interface that is suitable for test programs. Also,
L<Test::Perl::Critic::Progressive|Test::Perl::Critic::Progressive> is
useful for gradually applying coding standards to legacy code. For
the ultimate convenience (at the expense of some flexibility) see the
L<criticism|criticism> pragma.
Win32 and ActivePerl users can find PPM distributions of Perl::Critic at
L<http://theoryx5.uwinnipeg.ca/ppms/> and Alexandr Ciornii's downloadable
executable at L<http://chorny.net/perl/perlcritic.html>.
If you'd like to try L<Perl::Critic|Perl::Critic> without installing
anything, there is a web-service available at
L<http://perlcritic.com>. The web-service does not yet support all
the configuration features that are available in the native
Perl::Critic API, but it should give you a good idea of what it does.
You can also invoke the perlcritic web-service from the command-line
by doing an HTTP-post, such as one of these:
$> POST http://perlcritic.com/perl/critic.pl < MyModule.pm
$> lwp-request -m POST http://perlcritic.com/perl/critic.pl < MyModule.pm
$> wget -q -O - --post-file=MyModule.pm http://perlcritic.com/perl/critic.pl
Please note that the perlcritic web-service is still alpha code. The
URL and interface to the service are subject to change.
Also, the Perl Development Kit (PDK 8.0) from ActiveState includes a very
slick graphical interface to Perl-Critic. For details, go to
L<http://www.activestate.com/perl_dev_kit>
=head1 INTERFACE SUPPORT
This is considered to be a public class. Any changes to its interface
will go through a deprecation cycle.
=head1 CONSTRUCTOR
=over
=item C<< new( [ -profile => $FILE, -severity => $N, -theme => $string, -include => \@PATTERNS, -exclude => \@PATTERNS, -top => $N, -only => $B, -profile-strictness => $PROFILE_STRICTNESS_{WARN|FATAL|QUIET}, -force => $B, -verbose => $N ], -color => $B, -pager => $string, -allow-unsafe => $B, -criticism-fatal => $B) >>
=item C<< new() >>
Returns a reference to a new Perl::Critic object. Most arguments are
just passed directly into
L<Perl::Critic::Config|Perl::Critic::Config>, but I have described
them here as well. The default value for all arguments can be defined
in your F<.perlcriticrc> file. See the L<"CONFIGURATION"> section for
more information about that. All arguments are optional key-value
pairs as follows:
B<-profile> is a path to a configuration file. If C<$FILE> is not
defined, Perl::Critic::Config attempts to find a F<.perlcriticrc>
configuration file in the current directory, and then in your home
directory. Alternatively, you can set the C<PERLCRITIC> environment
variable to point to a file in another location. If a configuration
file can't be found, or if C<$FILE> is an empty string, then all
Policies will be loaded with their default configuration. See
L<"CONFIGURATION"> for more information.
B<-severity> is the minimum severity level. Only Policy modules that
have a severity greater than C<$N> will be applied. Severity values
are integers ranging from 1 (least severe violations) to 5 (most
severe violations). The default is 5. For a given C<-profile>,
decreasing the C<-severity> will usually reveal more Policy violations.
You can set the default value for this option in your F<.perlcriticrc>
file. Users can redefine the severity level for any Policy in their
F<.perlcriticrc> file. See L<"CONFIGURATION"> for more information.
If it is difficult for you to remember whether severity "5" is the
most or least restrictive level, then you can use one of these named
values:
    SEVERITY NAME ...is equivalent to... SEVERITY NUMBER
    --------------------------------------------------------
    -severity => 'gentle'                 -severity => 5
    -severity => 'stern'                  -severity => 4
    -severity => 'harsh'                  -severity => 3
    -severity => 'cruel'                  -severity => 2
    -severity => 'brutal'                 -severity => 1
The names reflect how severely the code is criticized: a C<gentle>
criticism reports only the most severe violations, and so on down to a
C<brutal> criticism which reports even the most minor violations.
B<-theme> is a special expression that determines which Policies to
apply based on their respective themes. For example, the following
would load only Policies that have a 'bugs' AND 'pbp' theme:
my $critic = Perl::Critic->new( -theme => 'bugs && pbp' );
Unless the C<-severity> option is explicitly given, setting C<-theme>
silently causes the C<-severity> to be set to 1. You can set the
default value for this option in your F<.perlcriticrc> file. See the
L<"POLICY THEMES"> section for more information about themes.
B<-include> is a reference to a list of string C<@PATTERNS>. Policy
modules that match at least one C<m/$PATTERN/ixms> will always be
loaded, irrespective of all other settings. For example:
my $critic = Perl::Critic->new(-include => ['layout'], -severity => 4);
This would cause Perl::Critic to apply all the C<CodeLayout::*> Policy
modules even though they have a severity level that is less than 4.
You can set the default value for this option in your F<.perlcriticrc>
file. You can also use C<-include> in conjunction with the
C<-exclude> option. Note that C<-exclude> takes precedence over
C<-include> when a Policy matches both patterns.
B<-exclude> is a reference to a list of string C<@PATTERNS>. Policy
modules that match at least one C<m/$PATTERN/ixms> will not be loaded,
irrespective of all other settings. For example:
my $critic = Perl::Critic->new(-exclude => ['strict'], -severity => 1);
This would cause Perl::Critic to not apply the C<RequireUseStrict> and
C<ProhibitNoStrict> Policy modules even though they have a severity
level that is greater than 1. You can set the default value for this
option in your F<.perlcriticrc> file. You can also use C<-exclude> in
conjunction with the C<-include> option. Note that C<-exclude> takes
precedence over C<-include> when a Policy matches both patterns.
B<-single-policy> is a string C<PATTERN>. Only one policy that
matches C<m/$PATTERN/ixms> will be used. Policies that do not match
will be excluded. This option has precedence over the C<-severity>,
C<-theme>, C<-include>, C<-exclude>, and C<-only> options. You can
set the default value for this option in your F<.perlcriticrc> file.
B<-top> is the maximum number of Violations to return when ranked by
their severity levels. This must be a positive integer. Violations
are still returned in the order that they occur within the file.
Unless the C<-severity> option is explicitly given, setting C<-top>
silently causes the C<-severity> to be set to 1. You can set the
default value for this option in your F<.perlcriticrc> file.
B<-only> is a boolean value. If set to a true value, Perl::Critic
will only choose from Policies that are mentioned in the user's
profile. If set to a false value (which is the default), then
Perl::Critic chooses from all the Policies that it finds at your site.
You can set the default value for this option in your F<.perlcriticrc>
file.
B<-profile-strictness> is an enumerated value, one of
L<Perl::Critic::Utils::Constants/"$PROFILE_STRICTNESS_WARN"> (the
default),
L<Perl::Critic::Utils::Constants/"$PROFILE_STRICTNESS_FATAL">, and
L<Perl::Critic::Utils::Constants/"$PROFILE_STRICTNESS_QUIET">. If set
to L<Perl::Critic::Utils::Constants/"$PROFILE_STRICTNESS_FATAL">,
Perl::Critic will make certain warnings about problems found in a
F<.perlcriticrc> or file specified via the B<-profile> option fatal.
For example, Perl::Critic normally only C<warn>s about profiles
referring to non-existent Policies, but this value makes this
situation fatal. Correspondingly,
L<Perl::Critic::Utils::Constants/"$PROFILE_STRICTNESS_QUIET"> makes
Perl::Critic shut up about these things.
B<-force> is a boolean value that controls whether Perl::Critic
observes the magical C<"## no critic"> annotations in your code.
If set to a true value, Perl::Critic will analyze all code. If set to
a false value (which is the default) Perl::Critic will ignore code
that is tagged with these annotations. See L<"BENDING THE RULES"> for
more information. You can set the default value for this option in
your F<.perlcriticrc> file.
B<-verbose> can be a positive integer (from 1 to 11), or a literal
format specification. See
L<Perl::Critic::Violation|Perl::Critic::Violation> for an explanation
of format specifications. You can set the default value for this
option in your F<.perlcriticrc> file.
B<-unsafe> directs Perl::Critic to allow the use of Policies that are marked
as "unsafe" by the author. Such policies may compile untrusted code or do
other nefarious things.
B<-color> and B<-pager> are not used by Perl::Critic but are provided for
the benefit of L<perlcritic|perlcritic>.
B<-criticism-fatal> is not used by Perl::Critic but is provided for
the benefit of L<criticism|criticism>.
B<-color-severity-highest>, B<-color-severity-high>,
B<-color-severity-medium>, B<-color-severity-low>, and
B<-color-severity-lowest> are not used by Perl::Critic, but are provided for
the benefit of L<perlcritic|perlcritic>. Each is set to the Term::ANSIColor
color specification to be used to display violations of the corresponding
severity.
B<-files-with-violations> and B<-files-without-violations> are not used by
Perl::Critic, but are provided for the benefit of L<perlcritic|perlcritic>, to
cause only the relevant filenames to be displayed.
=back
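Putting several of these options together, a constructor call might look
like the following sketch (the option values are purely illustrative):
    my $critic = Perl::Critic->new(
        -severity => 'stern',
        -theme    => 'bugs || security',
        -exclude  => [ qw(CodeLayout) ],
        -verbose  => 8,
    );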
=head1 METHODS
=over
=item C<critique( $source_code )>
Runs the C<$source_code> through the Perl::Critic engine using all the
Policies that have been loaded into this engine. If C<$source_code>
is a scalar reference, then it is treated as a string of actual Perl
code. If C<$source_code> is a reference to an instance of
L<PPI::Document|PPI::Document>, then that instance is used directly.
Otherwise, it is treated as a path to a local file containing Perl
code. This method returns a list of
L<Perl::Critic::Violation|Perl::Critic::Violation> objects for each
violation of the loaded Policies. The list is sorted in the order
that the Violations appear in the code. If there are no violations,
this method returns an empty list.
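For example, code held in memory can be critiqued by passing a reference to
the string (a minimal sketch; the code being checked is arbitrary):
    my $code = q{my $x = 1; print $x;};
    my @violations = $critic->critique( \$code );
    print @violations;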
=item C<< add_policy( -policy => $policy_name, -params => \%param_hash ) >>
Creates a Policy object and loads it into this Critic. If the object
cannot be instantiated, it will throw a fatal exception. Otherwise,
it returns a reference to this Critic.
B<-policy> is the name of a
L<Perl::Critic::Policy|Perl::Critic::Policy> subclass module. The
C<'Perl::Critic::Policy'> portion of the name can be omitted for
brevity. This argument is required.
B<-params> is an optional reference to a hash of Policy parameters.
The contents of this hash reference will be passed into the
constructor of the Policy module. See the documentation in the
relevant Policy module for a description of the arguments it supports.
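For example, using the same Policy and parameter that appear in the
L<"CONFIGURATION"> section below:
    $critic->add_policy(
        -policy => 'ControlStructures::ProhibitPostfixControls',
        -params => { allow => 'if unless' },
    );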
=item C< policies() >
Returns a list containing references to all the Policy objects that
have been loaded into this engine. Objects will be in the order that
they were loaded.
=item C< config() >
Returns the L<Perl::Critic::Config|Perl::Critic::Config> object that
was created for or given to this Critic.
=item C< statistics() >
Returns the L<Perl::Critic::Statistics|Perl::Critic::Statistics>
object that was created for this Critic. The Statistics object
accumulates data for all files that are analyzed by this Critic.
=back
=head1 FUNCTIONAL INTERFACE
For those folks who prefer to have a functional interface, The
C<critique> method can be exported on request and called as a static
function. If the first argument is a hashref, its contents are used
to construct a new Perl::Critic object internally. The keys of that
hash should be the same as those supported by the C<Perl::Critic::new>
method. Here are some examples:
use Perl::Critic qw(critique);
# Use default parameters...
@violations = critique( $some_file );
# Use custom parameters...
@violations = critique( {-severity => 2}, $some_file );
# As a one-liner
%> perl -MPerl::Critic=critique -e 'print critique(shift)' some_file.pm
None of the other object-methods are currently supported as static
functions. Sorry.
=head1 CONFIGURATION
Most of the settings for Perl::Critic and each of the Policy modules
can be controlled by a configuration file. The default configuration
file is called F<.perlcriticrc>. Perl::Critic will look for this file
in the current directory first, and then in your home directory.
Alternatively, you can set the C<PERLCRITIC> environment variable to
explicitly point to a different file in another location. If none of
these files exist, and the C<-profile> option is not given to the
constructor, then all the modules that are found in the
Perl::Critic::Policy namespace will be loaded with their default
configuration.
The format of the configuration file is a series of INI-style blocks
that contain key-value pairs separated by '='. Comments should start
with '#' and can be placed on a separate line or after the name-value
pairs if you desire.
Default settings for Perl::Critic itself can be set B<before the first
named block.> For example, putting any or all of these at the top of
your configuration file will set the default value for the
corresponding constructor argument.
severity = 3 #Integer or named level
only = 1 #Zero or One
force = 0 #Zero or One
verbose = 4 #Integer or format spec
top = 50 #A positive integer
theme = (pbp || security) && bugs #A theme expression
include = NamingConventions ClassHierarchies #Space-delimited list
exclude = Variables Modules::RequirePackage #Space-delimited list
criticism-fatal = 1 #Zero or One
color = 1 #Zero or One
allow-unsafe = 1 #Zero or One
pager = less #pager to pipe output to
The remainder of the configuration file is a series of blocks like
this:
[Perl::Critic::Policy::Category::PolicyName]
severity = 1
set_themes = foo bar
add_themes = baz
maximum_violations_per_document = 57
arg1 = value1
arg2 = value2
C<Perl::Critic::Policy::Category::PolicyName> is the full name of a
module that implements the policy. The Policy modules distributed
with Perl::Critic have been grouped into categories according to the
table of contents in Damian Conway's book B<Perl Best Practices>. For
brevity, you can omit the C<'Perl::Critic::Policy'> part of the module
name.
C<severity> is the level of importance you wish to assign to the
Policy. All Policy modules are defined with a default severity value
ranging from 1 (least severe) to 5 (most severe). However, you may
disagree with the default severity and choose to give it a higher or
lower severity, based on your own coding philosophy. You can set the
C<severity> to an integer from 1 to 5, or use one of the equivalent
names:
    SEVERITY NAME ...is equivalent to... SEVERITY NUMBER
    ----------------------------------------------------
    gentle                                             5
    stern                                              4
    harsh                                              3
    cruel                                              2
    brutal                                             1
The names reflect how severely the code is criticized: a C<gentle>
criticism reports only the most severe violations, and so on down to a
C<brutal> criticism which reports even the most minor violations.
C<set_themes> sets the theme for the Policy and overrides its default
theme. The argument is a string of one or more whitespace-delimited
alphanumeric words. Themes are case-insensitive. See L<"POLICY
THEMES"> for more information.
C<add_themes> appends to the default themes for this Policy. The
argument is a string of one or more whitespace-delimited words.
Themes are case-insensitive. See L<"POLICY THEMES"> for more
information.
C<maximum_violations_per_document> limits the number of Violations the
Policy will return for a given document. Some Policies have a default
limit; see the documentation for the individual Policies to see
whether there is one. To force a Policy to not have a limit, specify
"no_limit" or the empty string for the value of this parameter.
The remaining key-value pairs are configuration parameters that will
be passed into the constructor for that Policy. The constructors for
most Policy objects do not support arguments, and those that do should
have reasonable defaults. See the documentation on the appropriate
Policy module for more details.
Instead of redefining the severity for a given Policy, you can
completely disable a Policy by prepending a '-' to the name of the
module in your configuration file. In this manner, the Policy will
never be loaded, regardless of the C<-severity> given to the
Perl::Critic constructor.
A simple configuration might look like this:
#--------------------------------------------------------------
# I think these are really important, so always load them
[TestingAndDebugging::RequireUseStrict]
severity = 5
[TestingAndDebugging::RequireUseWarnings]
severity = 5
#--------------------------------------------------------------
# I think these are less important, so only load when asked
[Variables::ProhibitPackageVars]
severity = 2
[ControlStructures::ProhibitPostfixControls]
allow = if unless # My custom configuration
severity = cruel # Same as "severity = 2"
#--------------------------------------------------------------
# Give these policies a custom theme. I can activate just
# these policies by saying `perlcritic -theme larry`
[Modules::RequireFilenameMatchesPackage]
add_themes = larry
[TestingAndDebugging::RequireTestLabels]
add_themes = larry curly moe
#--------------------------------------------------------------
# I do not agree with these at all, so never load them
[-NamingConventions::Capitalization]
[-ValuesAndExpressions::ProhibitMagicNumbers]
#--------------------------------------------------------------
# For all other Policies, I accept the default severity,
# so no additional configuration is required for them.
For additional configuration examples, see the F<perlcriticrc> file
that is included in this F<examples> directory of this distribution.
Damian Conway's own Perl::Critic configuration is also included in
this distribution as F<examples/perlcriticrc-conway>.
=head1 THE POLICIES
A large number of Policy modules are distributed with Perl::Critic.
They are described briefly in the companion document
L<Perl::Critic::PolicySummary|Perl::Critic::PolicySummary> and in more
detail in the individual modules themselves. Say C<"perlcritic -doc
PATTERN"> to see the perldoc for all Policy modules that match the
regex C<m/PATTERN/ixms>
There are a number of distributions of additional policies on CPAN.
If L<Perl::Critic|Perl::Critic> doesn't contain a policy that you
want, someone may have already written it. See the L</"SEE ALSO">
section below for a list of some of these distributions.
=head1 POLICY THEMES
Each Policy is defined with one or more "themes". Themes can be used
to create arbitrary groups of Policies. They are intended to provide
an alternative mechanism for selecting your preferred set of Policies.
For example, you may wish to disable a certain subset of Policies when
analyzing test programs. Conversely, you may wish to enable only a
specific subset of Policies when analyzing modules.
The Policies that ship with Perl::Critic have been broken into the
following themes. This is just our attempt to provide some basic
logical groupings. You are free to invent new themes that suit your
needs.
    THEME             DESCRIPTION
    --------------------------------------------------------------------------
    core              All policies that ship with Perl::Critic
    pbp               Policies that come directly from "Perl Best Practices"
    bugs              Policies that prevent or reveal bugs
    maintenance       Policies that affect the long-term health of the code
    cosmetic          Policies that only have a superficial effect
    complexity        Policies that specifically relate to code complexity
    security          Policies that relate to security issues
    tests             Policies that are specific to test programs
Any Policy may fit into multiple themes. Say C<"perlcritic -list"> to
get a listing of all available Policies and the themes that are
associated with each one. You can also change the theme for any
Policy in your F<.perlcriticrc> file. See the L<"CONFIGURATION">
section for more information about that.
Using the C<-theme> option, you can create an arbitrarily complex rule
that determines which Policies will be loaded. Precedence is the same
as regular Perl code, and you can use parentheses to enforce
precedence as well. Supported operators are:
    Operator    Alternative    Example
    ------------------------------------------------------------------
    &&          and            'pbp && core'
    ||          or             'pbp || (bugs && security)'
    !           not            'pbp && ! (portability || complexity)'
Theme names are case-insensitive. If the C<-theme> is set to an empty
string, then it evaluates as true for all Policies.
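For example, to apply only the Policies that catch likely bugs while
skipping the purely cosmetic ones (the expression is illustrative):
    perlcritic -theme 'bugs && !cosmetic' lib/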
=head1 BENDING THE RULES
Perl::Critic takes a hard-line approach to your code: either you
comply or you don't. In the real world, it is not always practical
(nor even possible) to fully comply with coding standards. In such
cases, it is wise to show that you are knowingly violating the
standards and that you have a Damn Good Reason (DGR) for doing so.
To help with those situations, you can direct Perl::Critic to ignore
certain lines or blocks of code by using annotations:
require 'LegacyLibrary1.pl'; ## no critic
require 'LegacyLibrary2.pl'; ## no critic
for my $element (@list) {
## no critic
$foo = ""; #Violates 'ProhibitEmptyQuotes'
$barf = bar() if $foo; #Violates 'ProhibitPostfixControls'
#Some more evil code...
## use critic
#Some good code...
do_something($_);
}
The C<"## no critic"> annotations direct Perl::Critic to ignore the remaining
lines of code until a C<"## use critic"> annotation is found. If the C<"## no
critic"> annotation is on the same line as a code statement, then only that
line of code is overlooked. To direct perlcritic to ignore the C<"## no
critic"> annotations, use the C<--force> option.
A bare C<"## no critic"> annotation disables all the active Policies. If
you wish to disable only specific Policies, add a list of Policy names
as arguments, just as you would for the C<"no strict"> or C<"no
warnings"> pragmas. For example, this would disable the
C<ProhibitEmptyQuotes> and C<ProhibitPostfixControls> policies until
the end of the block or until the next C<"## use critic"> annotation
(whichever comes first):
## no critic (EmptyQuotes, PostfixControls)
# Now exempt from ValuesAndExpressions::ProhibitEmptyQuotes
$foo = "";
# Now exempt ControlStructures::ProhibitPostfixControls
$barf = bar() if $foo;
# Still subjected to ValuesAndExpression::RequireNumberSeparators
$long_int = 10000000000;
Since the Policy names are matched against the C<"## no critic">
arguments as regular expressions, you can abbreviate the Policy names
or disable an entire family of Policies in one shot like this:
## no critic (NamingConventions)
# Now exempt from NamingConventions::Capitalization
my $camelHumpVar = 'foo';
# Now exempt from NamingConventions::Capitalization
sub camelHumpSub {}
The argument list must be enclosed in parentheses and must contain one
or more comma-separated barewords (e.g. don't use quotes). The
C<"## no critic"> annotations can be nested, and Policies named by an
inner annotation will be disabled along with those already disabled by an
outer annotation.
Some Policies like C<Subroutines::ProhibitExcessComplexity> apply to
an entire block of code. In those cases, C<"## no critic"> must
appear on the line where the violation is reported. For example:
sub complicated_function { ## no critic (ProhibitExcessComplexity)
# Your code here...
}
Policies such as C<Documentation::RequirePodSections> apply to the
entire document, in which case violations are reported at line 1.
Use this feature wisely. C<"## no critic"> annotations should be used in the
smallest possible scope, or only on individual lines of code. And you
should always be as specific as possible about which Policies you want
to disable (i.e. never use a bare C<"## no critic">). If Perl::Critic
complains about your code, try and find a compliant solution before
resorting to this feature.
=head1 THE L<Perl::Critic|Perl::Critic> PHILOSOPHY
Coding standards are deeply personal and highly subjective. The goal
of Perl::Critic is to help you write code that conforms with a set of
best practices. Our primary goal is not to dictate what those
practices are, but rather, to implement the practices discovered by
others. Ultimately, you make the rules -- Perl::Critic is merely a
tool for encouraging consistency. If there is a policy that you think
is important or that we have overlooked, we would be very grateful for
contributions, or you can simply load your own private set of policies
into Perl::Critic.
=head1 EXTENDING THE CRITIC
The modular design of Perl::Critic is intended to facilitate the
addition of new Policies. You'll need to have some understanding of
L<PPI|PPI>, but most Policy modules are pretty straightforward and
only require about 20 lines of code. Please see the
L<Perl::Critic::DEVELOPER|Perl::Critic::DEVELOPER> file included in
this distribution for a step-by-step demonstration of how to create
new Policy modules.
If you develop any new Policy modules, feel free to send them to C<<
<[email protected]> >> and I'll be happy to put them into the
Perl::Critic distribution. Or if you would like to work on the
Perl::Critic project directly, check out our repository at
L<http://perlcritic.tigris.org>. To subscribe to our mailing list,
send a message to L<mailto:[email protected]>.
The Perl::Critic team is also available for hire. If your
organization has its own coding standards, we can create custom
Policies to enforce your local guidelines. Or if your code base is
prone to a particular defect pattern, we can design Policies that will
help you catch those costly defects B<before> they go into production.
To discuss your needs with the Perl::Critic team, just contact C<<
<[email protected]> >>.
=head1 PREREQUISITES
Perl::Critic requires the following modules:
L<B::Keywords|B::Keywords>
L<Config::Tiny|Config::Tiny>
L<Email::Address|Email::Address>
L<Exception::Class|Exception::Class>
L<File::Spec|File::Spec>
L<File::Spec::Unix|File::Spec::Unix>
L<IO::String|IO::String>
L<List::MoreUtils|List::MoreUtils>
L<List::Util|List::Util>
L<Module::Pluggable|Module::Pluggable>
L<Perl::Tidy|Perl::Tidy>
L<Pod::Spell|Pod::Spell>
L<PPI|PPI>
L<Pod::PlainText|Pod::PlainText>
L<Pod::Select|Pod::Select>
L<Pod::Usage|Pod::Usage>
L<Readonly|Readonly>
L<Scalar::Util|Scalar::Util>
L<String::Format|String::Format>
L<Task::Weaken|Task::Weaken>
L<Text::ParseWords|Text::ParseWords>
L<version|version>
The following modules are optional, but recommended for complete
functionality:
L<File::HomeDir|File::HomeDir>
L<File::Which|File::Which>
=head1 CONTACTING THE DEVELOPMENT TEAM
You are encouraged to subscribe to the mailing list; send a message to
L<mailto:[email protected]>. See also the archives at
L<http://perlcritic.tigris.org/servlets/SummarizeList?listName=users>.
You can also contact the author at C<< <[email protected]> >>.
At least one member of the development team has started hanging around
in L<irc://irc.perl.org/#perlcritic>.
You can also follow Perl::Critic on Twitter, at
L<https://twitter.com/perlcritic>.
=head1 SEE ALSO
There are a number of distributions of additional Policies available.
A few are listed here:
L<Perl::Critic::More|Perl::Critic::More>
L<Perl::Critic::Bangs|Perl::Critic::Bangs>
L<Perl::Critic::Lax|Perl::Critic::Lax>
L<Perl::Critic::StricterSubs|Perl::Critic::StricterSubs>
L<Perl::Critic::Swift|Perl::Critic::Swift>
L<Perl::Critic::Tics|Perl::Critic::Tics>
These distributions enable you to use Perl::Critic in your unit tests:
L<Test::Perl::Critic|Test::Perl::Critic>
L<Test::Perl::Critic::Progressive|Test::Perl::Critic::Progressive>
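For instance, a typical author test built on
L<Test::Perl::Critic|Test::Perl::Critic> might look something like this
sketch (the profile path is illustrative; see that module's documentation
for the full interface):

    # t/perlcritic.t
    use strict;
    use warnings;
    use Test::More;

    eval {
        require Test::Perl::Critic;
        Test::Perl::Critic->import( -profile => 't/perlcriticrc' );
        1;
    } or plan skip_all => 'Test::Perl::Critic required to criticize code';

    all_critic_ok();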
There is also a distribution that will install all the Perl::Critic related
modules known to the development team:
L<Task::Perl::Critic|Task::Perl::Critic>
If you want to make sure you have absolutely everything, you can use this:
L<Task::Perl::Critic::IncludingOptionalDependencies|Task::Perl::Critic::IncludingOptionalDependencies>
=head1 BUGS
Scrutinizing Perl code is hard for humans, let alone machines. If you
find any bugs, particularly false-positives or false-negatives from a
Perl::Critic::Policy, please submit them to
L<http://rt.cpan.org/NoAuth/Bugs.html?Dist=Perl-Critic>. Thanks.
Most policies will produce false-negatives if they cannot understand a
particular block of code.
=head1 CREDITS
Adam Kennedy - For creating L<PPI|PPI>, the heart and soul of
L<Perl::Critic|Perl::Critic>.
Damian Conway - For writing B<Perl Best Practices>, finally :)
Chris Dolan - For contributing the best features and Policy modules.
Andy Lester - Wise sage and master of all-things-testing.
Elliot Shank - The self-proclaimed quality freak.
Giuseppe Maxia - For all the great ideas and positive encouragement.
and Sharon, my wife - For putting up with my all-night code sessions.
Thanks also to the Perl Foundation for providing a grant to support
Chris Dolan's project to implement twenty PBP policies.
L<http://www.perlfoundation.org/april_1_2007_new_grant_awards>
=head1 AUTHOR
Jeffrey Ryan Thalhammer <[email protected]>
=head1 COPYRIGHT
Copyright (c) 2005-2011 Imaginative Software Systems. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the same terms as Perl itself. The full text of this license
can be found in the LICENSE file included with this module.
=cut
##############################################################################
# Local Variables:
# mode: cperl
# cperl-indent-level: 4
# fill-column: 78
# indent-tabs-mode: nil
# c-indentation-style: bsd
# End:
# ex: set ts=8 sts=4 sw=4 tw=78 ft=perl expandtab shiftround :
| 38 | 320 | 0.683116 |
eda56d0ab9182c04350cc5a7243aa285511010e3 | 448 | pm | Perl | lib/Net/Async/Slack/Event/StarAdded.pm | team-at-cpan/Net-Async-Slack | 51b4bc1e07a692abbdcd3faa84ba1718df1ae20a | [
"Artistic-1.0"
] | null | null | null | lib/Net/Async/Slack/Event/StarAdded.pm | team-at-cpan/Net-Async-Slack | 51b4bc1e07a692abbdcd3faa84ba1718df1ae20a | [
"Artistic-1.0"
] | 2 | 2021-02-06T10:54:04.000Z | 2021-11-21T14:27:48.000Z | lib/Net/Async/Slack/Event/StarAdded.pm | team-at-cpan/Net-Async-Slack | 51b4bc1e07a692abbdcd3faa84ba1718df1ae20a | [
"Artistic-1.0"
] | null | null | null | package Net::Async::Slack::Event::StarAdded;
use strict;
use warnings;
# VERSION
use Net::Async::Slack::EventType;
=head1 NAME
Net::Async::Slack::Event::StarAdded - A member has starred an item
=head1 DESCRIPTION
Example input data:
stars:read
=cut
sub type { 'star_added' }
1;
__END__
=head1 AUTHOR
Tom Molesworth <[email protected]>
=head1 LICENSE
Copyright Tom Molesworth 2016-2021. Licensed under the same terms as Perl itself.
| 12.8 | 81 | 0.732143 |