text
stringlengths 6
13.6M
| id
stringlengths 13
176
| metadata
dict | __index_level_0__
int64 0
1.69k
|
---|---|---|---|
export 'news_data_source_provider.dart';
export 'user_provider.dart';
| news_toolkit/flutter_news_example/api/lib/src/middleware/middleware.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/api/lib/src/middleware/middleware.dart",
"repo_id": "news_toolkit",
"token_count": 26
} | 912 |
import 'package:equatable/equatable.dart';
import 'package:json_annotation/json_annotation.dart';
import 'package:news_blocks/news_blocks.dart';
part 'relevant_search_response.g.dart';
/// {@template relevant_search_response}
/// A search response object which contains relevant news content.
/// {@endtemplate}
@JsonSerializable()
class RelevantSearchResponse extends Equatable {
  /// {@macro relevant_search_response}
  const RelevantSearchResponse({required this.articles, required this.topics});

  /// Converts a `Map<String, dynamic>` into a
  /// [RelevantSearchResponse] instance.
  factory RelevantSearchResponse.fromJson(Map<String, dynamic> json) =>
      _$RelevantSearchResponseFromJson(json);

  /// The article content blocks.
  ///
  /// Serialized through [NewsBlocksConverter] since [NewsBlock] is a
  /// polymorphic block type.
  @NewsBlocksConverter()
  final List<NewsBlock> articles;

  /// The associated relevant topics.
  final List<String> topics;

  /// Converts the current instance to a `Map<String, dynamic>`.
  Map<String, dynamic> toJson() => _$RelevantSearchResponseToJson(this);

  // Two responses are equal when both their articles and topics are equal.
  @override
  List<Object> get props => [articles, topics];
}
| news_toolkit/flutter_news_example/api/lib/src/models/relevant_search_response/relevant_search_response.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/api/lib/src/models/relevant_search_response/relevant_search_response.dart",
"repo_id": "news_toolkit",
"token_count": 317
} | 913 |
/// The supported news category types.
enum Category {
  /// News relating to business.
  business,

  /// News relating to entertainment.
  entertainment,

  /// Breaking news.
  top,

  /// News relating to health.
  health,

  /// News relating to science.
  science,

  /// News relating to sports.
  sports,

  /// News relating to technology.
  technology;

  /// Returns a [Category] for the [categoryName].
  ///
  /// Throws an [ArgumentError] naming the offending value when
  /// [categoryName] does not match any supported category (previously this
  /// surfaced as an unexplained [StateError] from [Iterable.firstWhere]).
  static Category fromString(String categoryName) => Category.values.firstWhere(
        (category) => category.name == categoryName,
        orElse: () => throw ArgumentError.value(
          categoryName,
          'categoryName',
          'No Category exists with this name.',
        ),
      );
}
| news_toolkit/flutter_news_example/api/packages/news_blocks/lib/src/category.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/api/packages/news_blocks/lib/src/category.dart",
"repo_id": "news_toolkit",
"token_count": 145
} | 914 |
// GENERATED CODE - DO NOT MODIFY BY HAND
// ignore_for_file: cast_nullable_to_non_nullable, implicit_dynamic_parameter, lines_longer_than_80_chars, prefer_const_constructors, require_trailing_commas

part of 'post_grid_tile_block.dart';

// **************************************************************************
// JsonSerializableGenerator
// **************************************************************************

// NOTE(review): generated by json_serializable — regenerate with
// build_runner rather than editing by hand.

// Decodes a PostGridTileBlock from [json]. $checkedCreate wraps every field
// conversion so that a type error reports the offending JSON key.
PostGridTileBlock _$PostGridTileBlockFromJson(Map<String, dynamic> json) =>
    $checkedCreate(
      'PostGridTileBlock',
      json,
      ($checkedConvert) {
        final val = PostGridTileBlock(
          id: $checkedConvert('id', (v) => v as String),
          category: $checkedConvert(
              'category', (v) => $enumDecode(_$PostCategoryEnumMap, v)),
          author: $checkedConvert('author', (v) => v as String),
          publishedAt: $checkedConvert(
              'published_at', (v) => DateTime.parse(v as String)),
          imageUrl: $checkedConvert('image_url', (v) => v as String),
          title: $checkedConvert('title', (v) => v as String),
          description: $checkedConvert('description', (v) => v as String?),
          action: $checkedConvert('action',
              (v) => const BlockActionConverter().fromJson(v as Map?)),
          // 'type' and 'is_premium' fall back to defaults when absent.
          type: $checkedConvert(
              'type', (v) => v as String? ?? PostGridTileBlock.identifier),
          isPremium: $checkedConvert('is_premium', (v) => v as bool? ?? false),
        );
        return val;
      },
      // Maps Dart field names to their snake_case JSON keys for error
      // reporting.
      fieldKeyMap: const {
        'publishedAt': 'published_at',
        'imageUrl': 'image_url',
        'isPremium': 'is_premium'
      },
    );

// Encodes [instance] to a JSON map, omitting nullable fields that are null.
Map<String, dynamic> _$PostGridTileBlockToJson(PostGridTileBlock instance) {
  final val = <String, dynamic>{
    'id': instance.id,
    'category': _$PostCategoryEnumMap[instance.category],
    'author': instance.author,
    'published_at': instance.publishedAt.toIso8601String(),
  };

  // Writes [key] only when [value] is non-null, keeping the payload compact.
  void writeNotNull(String key, dynamic value) {
    if (value != null) {
      val[key] = value;
    }
  }

  writeNotNull('image_url', instance.imageUrl);
  val['title'] = instance.title;
  writeNotNull('description', instance.description);
  writeNotNull('action', const BlockActionConverter().toJson(instance.action));
  val['is_premium'] = instance.isPremium;
  val['type'] = instance.type;
  return val;
}

// JSON wire names for PostCategory values.
const _$PostCategoryEnumMap = {
  PostCategory.business: 'business',
  PostCategory.entertainment: 'entertainment',
  PostCategory.health: 'health',
  PostCategory.science: 'science',
  PostCategory.sports: 'sports',
  PostCategory.technology: 'technology',
};
| news_toolkit/flutter_news_example/api/packages/news_blocks/lib/src/post_grid_tile_block.g.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/api/packages/news_blocks/lib/src/post_grid_tile_block.g.dart",
"repo_id": "news_toolkit",
"token_count": 988
} | 915 |
// GENERATED CODE - DO NOT MODIFY BY HAND
// ignore_for_file: cast_nullable_to_non_nullable, implicit_dynamic_parameter, lines_longer_than_80_chars, prefer_const_constructors, require_trailing_commas

part of 'spacer_block.dart';

// **************************************************************************
// JsonSerializableGenerator
// **************************************************************************

// NOTE(review): generated by json_serializable — regenerate with
// build_runner rather than editing by hand.

// Decodes a SpacerBlock; $checkedCreate reports the offending JSON key on
// conversion errors. 'type' falls back to SpacerBlock.identifier when
// absent.
SpacerBlock _$SpacerBlockFromJson(Map<String, dynamic> json) => $checkedCreate(
      'SpacerBlock',
      json,
      ($checkedConvert) {
        final val = SpacerBlock(
          spacing: $checkedConvert(
              'spacing', (v) => $enumDecode(_$SpacingEnumMap, v)),
          type: $checkedConvert(
              'type', (v) => v as String? ?? SpacerBlock.identifier),
        );
        return val;
      },
    );

// Encodes [instance] to its JSON map.
Map<String, dynamic> _$SpacerBlockToJson(SpacerBlock instance) =>
    <String, dynamic>{
      'spacing': _$SpacingEnumMap[instance.spacing],
      'type': instance.type,
    };

// JSON wire names for Spacing values.
const _$SpacingEnumMap = {
  Spacing.extraSmall: 'extraSmall',
  Spacing.small: 'small',
  Spacing.medium: 'medium',
  Spacing.large: 'large',
  Spacing.veryLarge: 'veryLarge',
  Spacing.extraLarge: 'extraLarge',
};
| news_toolkit/flutter_news_example/api/packages/news_blocks/lib/src/spacer_block.g.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/api/packages/news_blocks/lib/src/spacer_block.g.dart",
"repo_id": "news_toolkit",
"token_count": 466
} | 916 |
// GENERATED CODE - DO NOT MODIFY BY HAND
// ignore_for_file: cast_nullable_to_non_nullable, implicit_dynamic_parameter, lines_longer_than_80_chars, prefer_const_constructors, require_trailing_commas

part of 'video_introduction_block.dart';

// **************************************************************************
// JsonSerializableGenerator
// **************************************************************************

// NOTE(review): generated by json_serializable — regenerate with
// build_runner rather than editing by hand.

// Decodes a VideoIntroductionBlock; $checkedCreate reports the offending
// JSON key on conversion errors.
VideoIntroductionBlock _$VideoIntroductionBlockFromJson(
        Map<String, dynamic> json) =>
    $checkedCreate(
      'VideoIntroductionBlock',
      json,
      ($checkedConvert) {
        final val = VideoIntroductionBlock(
          category: $checkedConvert(
              'category', (v) => $enumDecode(_$PostCategoryEnumMap, v)),
          title: $checkedConvert('title', (v) => v as String),
          videoUrl: $checkedConvert('video_url', (v) => v as String),
          type: $checkedConvert(
              'type', (v) => v as String? ?? VideoIntroductionBlock.identifier),
        );
        return val;
      },
      // Maps the Dart field name to its snake_case JSON key for error
      // reporting.
      fieldKeyMap: const {'videoUrl': 'video_url'},
    );

// Encodes [instance] to its JSON map.
Map<String, dynamic> _$VideoIntroductionBlockToJson(
        VideoIntroductionBlock instance) =>
    <String, dynamic>{
      'category': _$PostCategoryEnumMap[instance.category],
      'title': instance.title,
      'video_url': instance.videoUrl,
      'type': instance.type,
    };

// JSON wire names for PostCategory values.
const _$PostCategoryEnumMap = {
  PostCategory.business: 'business',
  PostCategory.entertainment: 'entertainment',
  PostCategory.health: 'health',
  PostCategory.science: 'science',
  PostCategory.sports: 'sports',
  PostCategory.technology: 'technology',
};
| news_toolkit/flutter_news_example/api/packages/news_blocks/lib/src/video_introduction_block.g.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/api/packages/news_blocks/lib/src/video_introduction_block.g.dart",
"repo_id": "news_toolkit",
"token_count": 591
} | 917 |
import 'package:news_blocks/news_blocks.dart';
import 'package:test/test.dart';
void main() {
  group('PostLargeBlock', () {
    test('can be (de)serialized', () {
      // Round-trip a fully-populated block through JSON and verify the
      // decoded instance is equal to the original.
      final original = PostLargeBlock(
        id: 'id',
        category: PostCategory.technology,
        author: 'author',
        publishedAt: DateTime(2022, 3, 9),
        imageUrl: 'imageUrl',
        title: 'title',
      );
      final roundTripped = PostLargeBlock.fromJson(original.toJson());
      expect(roundTripped, equals(original));
    });
  });
}
| news_toolkit/flutter_news_example/api/packages/news_blocks/test/src/post_large_block_test.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/api/packages/news_blocks/test/src/post_large_block_test.dart",
"repo_id": "news_toolkit",
"token_count": 207
} | 918 |
import 'dart:io';
import 'package:dart_frog/dart_frog.dart';
import 'package:flutter_news_example_api/api.dart';
import 'package:mocktail/mocktail.dart';
import 'package:test/test.dart';
import '../../../routes/api/v1/categories/index.dart' as route;
class _MockNewsDataSource extends Mock implements NewsDataSource {}

class _MockRequestContext extends Mock implements RequestContext {}

void main() {
  group('GET /api/v1/categories', () {
    late NewsDataSource newsDataSource;

    setUp(() {
      newsDataSource = _MockNewsDataSource();
    });

    test('responds with a 200 and categories.', () async {
      const categories = [Category.sports, Category.entertainment];
      when(
        () => newsDataSource.getCategories(),
      ).thenAnswer((_) async => categories);
      const expected = CategoriesResponse(categories: categories);
      final request = Request('GET', Uri.parse('http://127.0.0.1/'));
      final context = _MockRequestContext();
      when(() => context.request).thenReturn(request);
      when(() => context.read<NewsDataSource>()).thenReturn(newsDataSource);
      final response = await route.onRequest(context);
      expect(response.statusCode, equals(HttpStatus.ok));
      expect(await response.json(), equals(expected.toJson()));
    });

    // Previously declared outside the group; moved inside so it is reported
    // under the route it exercises and shares the group's setUp.
    test('responds with 405 when method is not GET.', () async {
      // No data source stub is required: the route rejects on method alone,
      // before reading dependencies from the context.
      final request = Request('POST', Uri.parse('http://127.0.0.1/'));
      final context = _MockRequestContext();
      when(() => context.request).thenReturn(request);
      final response = await route.onRequest(context);
      expect(response.statusCode, equals(HttpStatus.methodNotAllowed));
    });
  });
}
| news_toolkit/flutter_news_example/api/test/routes/categories/index_test.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/api/test/routes/categories/index_test.dart",
"repo_id": "news_toolkit",
"token_count": 567
} | 919 |
export 'bloc/full_screen_ads_bloc.dart';
export 'widgets/widgets.dart';
| news_toolkit/flutter_news_example/lib/ads/ads.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/lib/ads/ads.dart",
"repo_id": "news_toolkit",
"token_count": 30
} | 920 |
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:flutter_news_example/analytics/analytics.dart';
import 'package:flutter_news_example/app/app.dart';
/// Wraps [child] with a listener that tracks an analytics event whenever the
/// app's authentication status changes to logged in.
class AuthenticatedUserListener extends StatelessWidget {
  /// Creates an [AuthenticatedUserListener] around [child].
  const AuthenticatedUserListener({
    required this.child,
    super.key,
  });

  /// The widget below this listener in the tree.
  final Widget child;

  @override
  Widget build(BuildContext context) {
    return BlocListener<AppBloc, AppState>(
      listener: (context, state) {
        if (state.status.isLoggedIn) {
          // New users produce a registration event; returning users a login
          // event.
          context.read<AnalyticsBloc>().add(
                TrackAnalyticsEvent(
                  state.user.isNewUser ? RegistrationEvent() : LoginEvent(),
                ),
              );
        }
      },
      // Only react when the authentication status actually changes.
      listenWhen: (previous, current) => previous.status != current.status,
      child: child,
    );
  }
}
| news_toolkit/flutter_news_example/lib/app/widgets/authenticated_user_listener.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/lib/app/widgets/authenticated_user_listener.dart",
"repo_id": "news_toolkit",
"token_count": 354
} | 921 |
import 'dart:async';
import 'package:collection/collection.dart';
import 'package:equatable/equatable.dart';
import 'package:hydrated_bloc/hydrated_bloc.dart';
import 'package:json_annotation/json_annotation.dart';
import 'package:news_repository/news_repository.dart';
part 'categories_event.dart';
part 'categories_state.dart';
part 'categories_bloc.g.dart';
/// Manages the list of available news categories and the currently selected
/// one, persisting its state across restarts via [HydratedBloc].
class CategoriesBloc extends HydratedBloc<CategoriesEvent, CategoriesState> {
  /// Creates a [CategoriesBloc] backed by the given [NewsRepository].
  CategoriesBloc({
    required NewsRepository newsRepository,
  })  : _newsRepository = newsRepository,
        super(const CategoriesState.initial()) {
    on<CategoriesRequested>(_onCategoriesRequested);
    on<CategorySelected>(_onCategorySelected);
  }

  final NewsRepository _newsRepository;

  // Fetches categories from the repository. Emits loading first; on success
  // emits the populated list with the first category pre-selected, on
  // failure emits a failure status and forwards the error to observers.
  FutureOr<void> _onCategoriesRequested(
    CategoriesRequested event,
    Emitter<CategoriesState> emit,
  ) async {
    emit(state.copyWith(status: CategoriesStatus.loading));
    try {
      final response = await _newsRepository.getCategories();
      emit(
        state.copyWith(
          status: CategoriesStatus.populated,
          categories: response.categories,
          // firstOrNull avoids a StateError when the list is empty.
          selectedCategory: response.categories.firstOrNull,
        ),
      );
    } catch (error, stackTrace) {
      emit(state.copyWith(status: CategoriesStatus.failure));
      addError(error, stackTrace);
    }
  }

  // Marks the event's category as the currently selected one.
  void _onCategorySelected(
    CategorySelected event,
    Emitter<CategoriesState> emit,
  ) =>
      emit(state.copyWith(selectedCategory: event.category));

  @override
  CategoriesState? fromJson(Map<String, dynamic> json) =>
      CategoriesState.fromJson(json);

  @override
  Map<String, dynamic>? toJson(CategoriesState state) => state.toJson();
}
| news_toolkit/flutter_news_example/lib/categories/bloc/categories_bloc.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/lib/categories/bloc/categories_bloc.dart",
"repo_id": "news_toolkit",
"token_count": 609
} | 922 |
import 'package:app_ui/app_ui.dart';
import 'package:flutter/material.dart';
/// Renders a widget containing a progress indicator that calls
/// [onPresented] when the item becomes visible.
class CategoryFeedLoaderItem extends StatefulWidget {
  /// Creates a loader item, optionally notifying [onPresented] when built.
  const CategoryFeedLoaderItem({super.key, this.onPresented});

  /// A callback performed when the widget is presented.
  final VoidCallback? onPresented;

  @override
  State<CategoryFeedLoaderItem> createState() => _CategoryFeedLoaderItemState();
}
class _CategoryFeedLoaderItemState extends State<CategoryFeedLoaderItem> {
  @override
  void initState() {
    super.initState();
    // NOTE(review): onPresented fires when this element is first inserted
    // into the tree, not strictly when it scrolls into view. In a lazily
    // built list the two coincide; confirm that holds for all usages.
    widget.onPresented?.call();
  }

  @override
  Widget build(BuildContext context) {
    return const Padding(
      padding: EdgeInsets.symmetric(vertical: AppSpacing.lg),
      child: Center(
        child: CircularProgressIndicator(),
      ),
    );
  }
}
| news_toolkit/flutter_news_example/lib/feed/widgets/category_feed_loader_item.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/lib/feed/widgets/category_feed_loader_item.dart",
"repo_id": "news_toolkit",
"token_count": 282
} | 923 |
export 'bloc/login_bloc.dart';
export 'bloc/login_with_email_link_bloc.dart';
export 'view/view.dart';
export 'widgets/widgets.dart';
| news_toolkit/flutter_news_example/lib/login/login.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/lib/login/login.dart",
"repo_id": "news_toolkit",
"token_count": 56
} | 924 |
export 'view/view.dart';
export 'widgets/widgets.dart';
| news_toolkit/flutter_news_example/lib/navigation/navigation.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/lib/navigation/navigation.dart",
"repo_id": "news_toolkit",
"token_count": 22
} | 925 |
part of 'notification_preferences_bloc.dart';
/// The status of the notification preferences operation in progress.
enum NotificationPreferencesStatus {
  /// No operation has been started yet.
  initial,

  /// An operation is in progress.
  loading,

  /// The last operation completed successfully.
  success,

  /// The last operation failed.
  failure,
}
/// The state of the user's notification preferences.
class NotificationPreferencesState extends Equatable {
  /// Creates a state holding the given [selectedCategories], [status] and
  /// [categories].
  const NotificationPreferencesState({
    required this.selectedCategories,
    required this.status,
    required this.categories,
  });

  /// The initial state: no categories loaded and none selected.
  NotificationPreferencesState.initial()
      : this(
          selectedCategories: {},
          status: NotificationPreferencesStatus.initial,
          categories: {},
        );

  /// The status of the current preferences operation.
  final NotificationPreferencesStatus status;

  /// All categories available for notification subscriptions.
  final Set<Category> categories;

  /// The categories currently selected by the user.
  final Set<Category> selectedCategories;

  @override
  List<Object?> get props => [selectedCategories, status, categories];

  /// Returns a copy of this state with the provided fields replaced.
  NotificationPreferencesState copyWith({
    Set<Category>? selectedCategories,
    NotificationPreferencesStatus? status,
    Set<Category>? categories,
  }) =>
      NotificationPreferencesState(
        selectedCategories: selectedCategories ?? this.selectedCategories,
        status: status ?? this.status,
        categories: categories ?? this.categories,
      );
}
| news_toolkit/flutter_news_example/lib/notification_preferences/bloc/notification_preferences_state.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/lib/notification_preferences/bloc/notification_preferences_state.dart",
"repo_id": "news_toolkit",
"token_count": 341
} | 926 |
part of 'search_bloc.dart';
/// Base class for events handled by the search bloc.
abstract class SearchEvent extends Equatable {
  const SearchEvent();
}

/// Signals that the user changed the search term.
class SearchTermChanged extends SearchEvent {
  /// Creates a [SearchTermChanged] with the given [searchTerm].
  const SearchTermChanged({this.searchTerm = ''});

  /// The current search term. Defaults to the empty string.
  final String searchTerm;

  @override
  List<Object?> get props => [searchTerm];
}
| news_toolkit/flutter_news_example/lib/search/bloc/search_event.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/lib/search/bloc/search_event.dart",
"repo_id": "news_toolkit",
"token_count": 83
} | 927 |
part of 'subscriptions_bloc.dart';
/// The status of a subscription purchase flow.
enum PurchaseStatus {
  /// No purchase is in progress.
  none,

  /// A purchase has been started and is awaiting completion.
  pending,

  /// The purchase completed successfully.
  completed,

  /// The purchase failed.
  failed,
}
/// The state of available subscriptions and the current purchase flow.
class SubscriptionsState extends Equatable {
  /// Creates a state holding [subscriptions] and the [purchaseStatus].
  const SubscriptionsState({
    required this.subscriptions,
    required this.purchaseStatus,
  });

  /// The initial state: no subscriptions loaded, no purchase in progress.
  SubscriptionsState.initial()
      : this(
          subscriptions: [],
          purchaseStatus: PurchaseStatus.none,
        );

  /// The subscriptions available to the user.
  final List<Subscription> subscriptions;

  /// The status of the current purchase, if any.
  final PurchaseStatus purchaseStatus;

  @override
  List<Object> get props => [subscriptions, purchaseStatus];

  /// Returns a copy of this state with the provided fields replaced.
  SubscriptionsState copyWith({
    List<Subscription>? subscriptions,
    PurchaseStatus? purchaseStatus,
  }) =>
      SubscriptionsState(
        subscriptions: subscriptions ?? this.subscriptions,
        purchaseStatus: purchaseStatus ?? this.purchaseStatus,
      );
}
| news_toolkit/flutter_news_example/lib/subscriptions/dialog/bloc/subscriptions_state.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/lib/subscriptions/dialog/bloc/subscriptions_state.dart",
"repo_id": "news_toolkit",
"token_count": 274
} | 928 |
export 'terms_of_service_body.dart';
| news_toolkit/flutter_news_example/lib/terms_of_service/widgets/widgets.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/lib/terms_of_service/widgets/widgets.dart",
"repo_id": "news_toolkit",
"token_count": 14
} | 929 |
include: package:very_good_analysis/analysis_options.5.1.0.yaml
| news_toolkit/flutter_news_example/packages/ads_consent_client/analysis_options.yaml/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/ads_consent_client/analysis_options.yaml",
"repo_id": "news_toolkit",
"token_count": 23
} | 930 |
include: package:very_good_analysis/analysis_options.5.1.0.yaml
analyzer:
exclude:
- lib/src/generated/**
| news_toolkit/flutter_news_example/packages/app_ui/analysis_options.yaml/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/app_ui/analysis_options.yaml",
"repo_id": "news_toolkit",
"token_count": 43
} | 931 |
import 'package:app_ui/app_ui.dart';
import 'package:flutter/material.dart';
import 'package:gallery/colors/colors.dart';
import 'package:gallery/spacing/spacing.dart';
import 'package:gallery/typography/typography.dart';
import 'package:gallery/widgets/widgets.dart';
/// Entry point: runs the gallery application.
void main() => runApp(const MyApp());
/// The root widget of the Flutter News Example gallery.
class MyApp extends StatelessWidget {
  /// Creates the gallery app.
  const MyApp({super.key});

  @override
  Widget build(BuildContext context) {
    return MaterialApp(
      title: 'Flutter News Example Gallery',
      // The app-wide theme comes from the app_ui package under review.
      theme: const AppTheme().themeData,
      home: const RootPage(),
    );
  }
}
/// The gallery's landing page, listing a navigation entry for each showcase
/// section (colors, typography, spacing, widgets).
class RootPage extends StatelessWidget {
  /// Creates the root page.
  const RootPage({super.key});

  @override
  Widget build(BuildContext context) {
    // One entry per gallery section; tapping pushes that section's route.
    final pages = [
      _ListItem(
        icon: const Icon(Icons.color_lens),
        title: const Text('Colors'),
        subtitle: const Text('All of the predefined colors'),
        onTap: () => Navigator.of(context).push<void>(ColorsPage.route()),
      ),
      _ListItem(
        icon: const Icon(Icons.text_format),
        title: const Text('Typography'),
        subtitle: const Text('All of the predefined text styles'),
        onTap: () => Navigator.of(context).push<void>(TypographyPage.route()),
      ),
      _ListItem(
        icon: const Icon(Icons.border_vertical),
        title: const Text('Spacing'),
        subtitle: const Text('All of the predefined spacings'),
        onTap: () => Navigator.of(context).push<void>(SpacingPage.route()),
      ),
      _ListItem(
        icon: const Icon(Icons.widgets),
        title: const Text('Widgets'),
        subtitle: const Text('All of the predefined widgets'),
        onTap: () => Navigator.of(context).push<void>(WidgetsPage.route()),
      ),
    ];
    return Scaffold(
      appBar: AppBar(title: const Text('Flutter News Example Gallery')),
      body: ListView.separated(
        itemCount: pages.length,
        itemBuilder: (_, index) => pages[index],
        separatorBuilder: (_, __) => const Divider(),
      ),
    );
  }
}
/// A tappable list row with a leading icon, title, subtitle and a trailing
/// forward arrow, used to navigate to a gallery section.
class _ListItem extends StatelessWidget {
  const _ListItem({
    required this.onTap,
    required this.icon,
    required this.title,
    required this.subtitle,
  });

  /// Invoked when the row is tapped.
  final VoidCallback onTap;

  /// The icon shown at the start of the row.
  final Icon icon;

  /// The primary text of the row.
  final Text title;

  /// The secondary text shown below [title].
  final Text subtitle;

  @override
  Widget build(BuildContext context) {
    final themeIconColor = Theme.of(context).iconTheme.color;
    return ListTile(
      leading: IconTheme(
        data: IconThemeData(color: themeIconColor),
        child: icon,
      ),
      title: title,
      subtitle: subtitle,
      trailing: const Icon(Icons.arrow_forward),
      onTap: onTap,
    );
  }
}
| news_toolkit/flutter_news_example/packages/app_ui/gallery/lib/main.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/app_ui/gallery/lib/main.dart",
"repo_id": "news_toolkit",
"token_count": 1037
} | 932 |
import 'package:app_ui/app_ui.dart';
import 'package:flutter/material.dart';
/// The app consists of two main text style definitions: UI and Content.
///
/// Content text style is primarily used for all content-based components,
/// e.g. news feed including articles and sections, while the UI text style
/// is used for the rest of UI components.
///
/// The default app's [TextTheme] is [AppTheme.uiTextTheme].
///
/// Use [ContentThemeOverrideBuilder] to override the default [TextTheme]
/// to [AppTheme.contentTextTheme].
/// UI Text Style Definitions
abstract class UITextStyle {
  // Shared defaults every UI style derives from via copyWith:
  // NotoSansDisplay resolved from the app_ui package, regular weight,
  // no decoration, alphabetic baseline.
  static const _baseTextStyle = TextStyle(
    package: 'app_ui',
    fontWeight: AppFontWeight.regular,
    fontFamily: 'NotoSansDisplay',
    decoration: TextDecoration.none,
    textBaseline: TextBaseline.alphabetic,
  );

  /// Display 2 Text Style
  static final TextStyle display2 = _baseTextStyle.copyWith(
    fontSize: 57,
    fontWeight: AppFontWeight.bold,
    height: 1.12,
    letterSpacing: -0.25,
  );

  /// Display 3 Text Style
  static final TextStyle display3 = _baseTextStyle.copyWith(
    fontSize: 45,
    fontWeight: AppFontWeight.bold,
    height: 1.15,
  );

  /// Headline 1 Text Style
  static final TextStyle headline1 = _baseTextStyle.copyWith(
    fontSize: 36,
    fontWeight: AppFontWeight.bold,
    height: 1.22,
  );

  /// Headline 2 Text Style
  static final TextStyle headline2 = _baseTextStyle.copyWith(
    fontSize: 32,
    fontWeight: AppFontWeight.bold,
    height: 1.25,
  );

  /// Headline 3 Text Style
  static final TextStyle headline3 = _baseTextStyle.copyWith(
    fontSize: 28,
    fontWeight: AppFontWeight.semiBold,
    height: 1.28,
  );

  /// Headline 4 Text Style
  static final TextStyle headline4 = _baseTextStyle.copyWith(
    fontSize: 24,
    fontWeight: AppFontWeight.semiBold,
    height: 1.33,
  );

  /// Headline 5 Text Style
  static final TextStyle headline5 = _baseTextStyle.copyWith(
    fontSize: 22,
    fontWeight: AppFontWeight.regular,
    height: 1.27,
  );

  /// Headline 6 Text Style
  static final TextStyle headline6 = _baseTextStyle.copyWith(
    fontSize: 18,
    fontWeight: AppFontWeight.semiBold,
    height: 1.33,
  );

  /// Subtitle 1 Text Style
  static final TextStyle subtitle1 = _baseTextStyle.copyWith(
    fontSize: 16,
    height: 1.5,
    letterSpacing: 0.1,
  );

  /// Subtitle 2 Text Style
  static final TextStyle subtitle2 = _baseTextStyle.copyWith(
    fontSize: 14,
    height: 1.42,
    letterSpacing: 0.1,
  );

  /// Body Text 1 Text Style
  static final TextStyle bodyText1 = _baseTextStyle.copyWith(
    fontSize: 16,
    height: 1.5,
    letterSpacing: 0.5,
  );

  /// Body Text 2 Text Style (the default)
  static final TextStyle bodyText2 = _baseTextStyle.copyWith(
    fontSize: 14,
    height: 1.42,
    letterSpacing: 0.25,
  );

  /// Caption Text Style
  static final TextStyle caption = _baseTextStyle.copyWith(
    fontSize: 12,
    height: 1.33,
    letterSpacing: 0.4,
  );

  /// Button Text Style
  static final TextStyle button = _baseTextStyle.copyWith(
    fontSize: 16,
    height: 1.42,
    letterSpacing: 0.1,
  );

  /// Overline Text Style
  static final TextStyle overline = _baseTextStyle.copyWith(
    fontSize: 12,
    height: 1.33,
    letterSpacing: 0.5,
  );

  /// Label Small Text Style
  static final TextStyle labelSmall = _baseTextStyle.copyWith(
    fontSize: 11,
    height: 1.45,
    letterSpacing: 0.5,
  );
}
/// Content Text Style Definitions
abstract class ContentTextStyle {
  // Shared defaults for content styles: the serif NotoSerif family, regular
  // weight, no decoration, alphabetic baseline. A few styles below (button,
  // caption, overline, labelSmall) override fontFamily individually.
  static const _baseTextStyle = TextStyle(
    package: 'app_ui',
    fontWeight: AppFontWeight.regular,
    fontFamily: 'NotoSerif',
    decoration: TextDecoration.none,
    textBaseline: TextBaseline.alphabetic,
  );

  /// Display 1 Text Style
  static final TextStyle display1 = _baseTextStyle.copyWith(
    fontSize: 64,
    fontWeight: AppFontWeight.bold,
    height: 1.18,
    letterSpacing: -0.5,
  );

  /// Display 2 Text Style
  static final TextStyle display2 = _baseTextStyle.copyWith(
    fontSize: 57,
    fontWeight: AppFontWeight.bold,
    height: 1.12,
    letterSpacing: -0.25,
  );

  /// Display 3 Text Style
  static final TextStyle display3 = _baseTextStyle.copyWith(
    fontSize: 45,
    fontWeight: AppFontWeight.bold,
    height: 1.15,
  );

  /// Headline 1 Text Style
  static final TextStyle headline1 = _baseTextStyle.copyWith(
    fontSize: 36,
    fontWeight: AppFontWeight.semiBold,
    height: 1.22,
  );

  /// Headline 2 Text Style
  static final TextStyle headline2 = _baseTextStyle.copyWith(
    fontSize: 32,
    fontWeight: AppFontWeight.medium,
    height: 1.25,
  );

  /// Headline 3 Text Style
  static final TextStyle headline3 = _baseTextStyle.copyWith(
    fontSize: 28,
    fontWeight: AppFontWeight.medium,
    height: 1.28,
  );

  /// Headline 4 Text Style
  static final TextStyle headline4 = _baseTextStyle.copyWith(
    fontSize: 24,
    fontWeight: AppFontWeight.semiBold,
    height: 1.33,
  );

  /// Headline 5 Text Style
  static final TextStyle headline5 = _baseTextStyle.copyWith(
    fontSize: 22,
    height: 1.27,
  );

  /// Headline 6 Text Style
  static final TextStyle headline6 = _baseTextStyle.copyWith(
    fontSize: 18,
    fontWeight: AppFontWeight.semiBold,
    height: 1.33,
  );

  /// Subtitle 1 Text Style
  static final TextStyle subtitle1 = _baseTextStyle.copyWith(
    fontSize: 16,
    height: 1.5,
    letterSpacing: 0.1,
  );

  /// Subtitle 2 Text Style
  static final TextStyle subtitle2 = _baseTextStyle.copyWith(
    fontSize: 14,
    fontWeight: AppFontWeight.medium,
    height: 1.42,
    letterSpacing: 0.1,
  );

  /// Body Text 1 Text Style
  static final TextStyle bodyText1 = _baseTextStyle.copyWith(
    fontSize: 16,
    height: 1.5,
    letterSpacing: 0.5,
  );

  /// Body Text 2 Text Style (the default)
  static final TextStyle bodyText2 = _baseTextStyle.copyWith(
    fontSize: 14,
    height: 1.42,
    letterSpacing: 0.25,
  );

  /// Button Text Style
  static final TextStyle button = _baseTextStyle.copyWith(
    fontFamily: 'Montserrat',
    fontSize: 14,
    fontWeight: AppFontWeight.medium,
    height: 1.42,
    letterSpacing: 0.1,
  );

  /// Caption Text Style
  static final TextStyle caption = _baseTextStyle.copyWith(
    fontFamily: 'NotoSansDisplay',
    fontSize: 12,
    height: 1.33,
    letterSpacing: 0.4,
  );

  /// Overline Text Style
  static final TextStyle overline = _baseTextStyle.copyWith(
    fontFamily: 'NotoSansDisplay',
    fontWeight: AppFontWeight.semiBold,
    fontSize: 12,
    height: 1.33,
    letterSpacing: 0.5,
  );

  /// Label Small Text Style
  static final TextStyle labelSmall = _baseTextStyle.copyWith(
    fontFamily: 'NotoSansDisplay',
    fontSize: 11,
    height: 1.45,
    letterSpacing: 0.5,
  );
}
| news_toolkit/flutter_news_example/packages/app_ui/lib/src/typography/app_text_styles.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/app_ui/lib/src/typography/app_text_styles.dart",
"repo_id": "news_toolkit",
"token_count": 2432
} | 933 |
import 'package:app_ui/app_ui.dart';
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:mockingjay/mockingjay.dart';
import '../helpers/helpers.dart';
class MockFunction extends Mock {
  void call();
}

void main() {
  group('AppBackButton', () {
    testWidgets('renders IconButton', (tester) async {
      await tester.pumpApp(
        Scaffold(
          appBar: AppBar(
            leading: const AppBackButton(),
          ),
        ),
      );
      expect(find.byType(IconButton), findsOneWidget);
    });

    testWidgets('renders IconButton when light', (tester) async {
      await tester.pumpApp(
        Scaffold(
          appBar: AppBar(
            leading: const AppBackButton.light(),
          ),
        ),
      );
      expect(find.byType(IconButton), findsOneWidget);
    });

    group('navigates', () {
      // Test descriptions below were previously ungrammatical and one
      // carried a trailing space ('call onPressed when is provided ').
      testWidgets('back when the icon button is pressed', (tester) async {
        final navigator = MockNavigator();
        when(navigator.pop).thenAnswer((_) async {});
        await tester.pumpApp(
          const AppBackButton(),
          navigator: navigator,
        );
        await tester.tap(find.byType(IconButton));
        await tester.pumpAndSettle();
        verify(navigator.pop).called(1);
      });

      testWidgets('calls onPressed when provided', (tester) async {
        final onPressed = MockFunction();
        await tester.pumpApp(
          AppBackButton(onPressed: onPressed.call),
        );
        await tester.tap(find.byType(IconButton));
        await tester.pumpAndSettle();
        verify(onPressed.call).called(1);
      });

      testWidgets(
          'calls onPressed when provided '
          'and style is light', (tester) async {
        final onPressed = MockFunction();
        await tester.pumpApp(
          AppBackButton.light(onPressed: onPressed.call),
        );
        await tester.tap(find.byType(IconButton));
        await tester.pumpAndSettle();
        verify(onPressed.call).called(1);
      });
    });
  });
}
| news_toolkit/flutter_news_example/packages/app_ui/test/src/widgets/app_back_button_test.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/app_ui/test/src/widgets/app_back_button_test.dart",
"repo_id": "news_toolkit",
"token_count": 974
} | 934 |
import 'package:article_repository/article_repository.dart';
import 'package:clock/clock.dart';
import 'package:flutter_news_example_api/client.dart';
import 'package:mocktail/mocktail.dart';
import 'package:test/test.dart';
class MockFlutterNewsExampleApiClient extends Mock
implements FlutterNewsExampleApiClient {}
class MockArticleStorage extends Mock implements ArticleStorage {}
void main() {
  group('ArticleRepository', () {
    late FlutterNewsExampleApiClient apiClient;
    late ArticleStorage storage;
    late ArticleRepository articleRepository;

    setUp(() {
      apiClient = MockFlutterNewsExampleApiClient();
      storage = MockArticleStorage();
      // Storage writes are stubbed up-front so individual tests only need to
      // stub the reads they exercise.
      when(() => storage.setArticleViews(any())).thenAnswer((_) async {});
      when(() => storage.setArticleViewsResetDate(any()))
          .thenAnswer((_) async {});
      articleRepository = ArticleRepository(
        apiClient: apiClient,
        storage: storage,
      );
    });

    group('getArticle', () {
      test(
          'returns ArticleResponse '
          'from ApiClient.getArticle', () {
        const content = <NewsBlock>[
          TextCaptionBlock(text: 'text', color: TextCaptionColor.normal),
          TextParagraphBlock(text: 'text'),
        ];
        final articleResponse = ArticleResponse(
          title: 'title',
          content: content,
          totalCount: content.length,
          url: Uri.parse('https://www.dglobe.com/'),
          isPremium: false,
          isPreview: false,
        );
        when(
          () => apiClient.getArticle(
            id: any(named: 'id'),
            offset: any(named: 'offset'),
            limit: any(named: 'limit'),
            preview: any(named: 'preview'),
          ),
        ).thenAnswer((_) async => articleResponse);
        expect(
          articleRepository.getArticle(
            id: 'id',
            offset: 10,
            limit: 20,
          ),
          completion(equals(articleResponse)),
        );
        verify(
          () => apiClient.getArticle(
            id: 'id',
            offset: 10,
            limit: 20,
          ),
        ).called(1);
      });

      test(
          'throws GetArticleFailure '
          'if ApiClient.getArticle fails', () async {
        // Stub mirrors the success case (including `preview`) and throws an
        // Exception *instance* rather than the `Exception` type itself,
        // consistent with the other failure tests in this file.
        when(
          () => apiClient.getArticle(
            id: any(named: 'id'),
            offset: any(named: 'offset'),
            limit: any(named: 'limit'),
            preview: any(named: 'preview'),
          ),
        ).thenThrow(Exception());
        expect(
          () => articleRepository.getArticle(id: 'id'),
          throwsA(isA<GetArticleFailure>()),
        );
      });
    });

    group('getRelatedArticles', () {
      test(
          'returns RelatedArticlesResponse '
          'from ApiClient.getRelatedArticles', () async {
        const relatedArticlesResponse = RelatedArticlesResponse(
          relatedArticles: [
            SpacerBlock(spacing: Spacing.extraLarge),
            DividerHorizontalBlock(),
          ],
          totalCount: 2,
        );
        when(
          () => apiClient.getRelatedArticles(
            id: any(named: 'id'),
          ),
        ).thenAnswer((_) async => relatedArticlesResponse);
        final response = await articleRepository.getRelatedArticles(id: 'id');
        expect(response, equals(relatedArticlesResponse));
      });

      test(
          'throws GetRelatedArticlesFailure '
          'if ApiClient.getRelatedArticles fails', () async {
        when(
          () => apiClient.getRelatedArticles(
            id: any(named: 'id'),
          ),
        ).thenThrow(Exception());
        expect(
          articleRepository.getRelatedArticles(id: 'id'),
          throwsA(isA<GetRelatedArticlesFailure>()),
        );
      });
    });

    group('incrementArticleViews', () {
      test(
          'calls ArticleStorage.setArticleViews '
          'with current article views increased by 1', () async {
        const currentArticleViews = 3;
        when(storage.fetchArticleViews)
            .thenAnswer((_) async => currentArticleViews);
        await articleRepository.incrementArticleViews();
        verify(storage.fetchArticleViews).called(1);
        verify(() => storage.setArticleViews(currentArticleViews + 1))
            .called(1);
      });

      test(
          'throws an IncrementArticleViewsFailure '
          'when incrementing article views fails', () async {
        when(() => storage.setArticleViews(any())).thenThrow(Exception());
        expect(
          () => articleRepository.incrementArticleViews(),
          throwsA(isA<IncrementArticleViewsFailure>()),
        );
      });
    });

    group('decrementArticleViews', () {
      test(
          'calls ArticleStorage.setArticleViews '
          'with current article views decreased by 1', () async {
        const currentArticleViews = 3;
        when(storage.fetchArticleViews)
            .thenAnswer((_) async => currentArticleViews);
        await articleRepository.decrementArticleViews();
        verify(storage.fetchArticleViews).called(1);
        verify(() => storage.setArticleViews(currentArticleViews - 1))
            .called(1);
      });

      test(
          'throws a DecrementArticleViewsFailure '
          'when decrementing article views fails', () async {
        when(() => storage.setArticleViews(any())).thenThrow(Exception());
        expect(
          () => articleRepository.decrementArticleViews(),
          throwsA(isA<DecrementArticleViewsFailure>()),
        );
      });
    });

    group('resetArticleViews', () {
      test(
          'calls ArticleStorage.setArticleViews '
          'with 0 article views', () async {
        await articleRepository.resetArticleViews();
        verify(() => storage.setArticleViews(0)).called(1);
      });

      test(
          'calls ArticleStorage.setArticleViewsResetDate '
          'with current date', () async {
        // Freeze the clock so the persisted reset date is deterministic.
        final now = DateTime(2022, 6, 7);
        await withClock(Clock.fixed(now), () async {
          await articleRepository.resetArticleViews();
          verify(() => storage.setArticleViewsResetDate(now)).called(1);
        });
      });

      test(
          'throws a ResetArticleViewsFailure '
          'when resetting article views fails', () async {
        when(() => storage.setArticleViews(any())).thenThrow(Exception());
        expect(
          () => articleRepository.resetArticleViews(),
          throwsA(isA<ResetArticleViewsFailure>()),
        );
      });
    });

    group('fetchArticleViews', () {
      test(
          'returns the number of article views '
          'from ArticleStorage.fetchArticleViews', () async {
        const currentArticleViews = 3;
        when(storage.fetchArticleViews)
            .thenAnswer((_) async => currentArticleViews);
        when(storage.fetchArticleViewsResetDate).thenAnswer((_) async => null);
        final result = await articleRepository.fetchArticleViews();
        expect(result.views, equals(currentArticleViews));
      });

      test(
          'returns the reset date of the number of article views '
          'from ArticleStorage.fetchArticleViewsResetDate', () async {
        final resetDate = DateTime(2022, 6, 7);
        when(storage.fetchArticleViews).thenAnswer((_) async => 0);
        when(storage.fetchArticleViewsResetDate)
            .thenAnswer((_) async => resetDate);
        final result = await articleRepository.fetchArticleViews();
        expect(result.resetAt, equals(resetDate));
      });

      test(
          'throws a FetchArticleViewsFailure '
          'when fetching article views fails', () async {
        when(storage.fetchArticleViews).thenThrow(Exception());
        expect(
          () => articleRepository.fetchArticleViews(),
          throwsA(isA<FetchArticleViewsFailure>()),
        );
      });
    });

    group('ArticleFailure', () {
      final error = Exception('errorMessage');

      group('GetArticleFailure', () {
        test('has correct props', () {
          expect(GetArticleFailure(error).props, [error]);
        });
      });

      group('GetRelatedArticlesFailure', () {
        test('has correct props', () {
          expect(GetRelatedArticlesFailure(error).props, [error]);
        });
      });

      group('IncrementArticleViewsFailure', () {
        test('has correct props', () {
          expect(IncrementArticleViewsFailure(error).props, [error]);
        });
      });

      group('DecrementArticleViewsFailure', () {
        test('has correct props', () {
          expect(DecrementArticleViewsFailure(error).props, [error]);
        });
      });

      group('ResetArticleViewsFailure', () {
        test('has correct props', () {
          expect(ResetArticleViewsFailure(error).props, [error]);
        });
      });

      group('FetchArticleViewsFailure', () {
        test('has correct props', () {
          expect(FetchArticleViewsFailure(error).props, [error]);
        });
      });
    });

    group('incrementTotalArticleViews', () {
      test(
          'calls UserStorage.setTotalArticleViews '
          'with current total article views increased by 1', () async {
        const totalArticleViews = 3;
        when(storage.fetchTotalArticleViews)
            .thenAnswer((_) async => totalArticleViews);
        when(() => storage.setTotalArticleViews(any()))
            .thenAnswer((_) async {});
        await articleRepository.incrementTotalArticleViews();
        verify(storage.fetchTotalArticleViews).called(1);
        verify(
          () => storage.setTotalArticleViews(totalArticleViews + 1),
        ).called(1);
      });

      test(
          'throws an IncrementTotalArticleViewsFailure '
          'when incrementing total article views fails', () async {
        when(() => storage.setTotalArticleViews(any())).thenThrow(Exception());
        expect(
          () => articleRepository.incrementTotalArticleViews(),
          throwsA(isA<IncrementTotalArticleViewsFailure>()),
        );
      });
    });

    group('fetchTotalArticleViews', () {
      test(
          'returns the number of total article views '
          'from UserStorage.fetchTotalArticleViews', () async {
        const currentArticleViews = 3;
        when(storage.fetchTotalArticleViews)
            .thenAnswer((_) async => currentArticleViews);
        final result = await articleRepository.fetchTotalArticleViews();
        expect(result, equals(currentArticleViews));
      });

      test(
          'throws a FetchTotalArticleViewsFailure '
          'when fetching total article views fails', () async {
        when(storage.fetchTotalArticleViews).thenThrow(Exception());
        expect(
          () => articleRepository.fetchTotalArticleViews(),
          throwsA(isA<FetchTotalArticleViewsFailure>()),
        );
      });
    });
  });
}
| news_toolkit/flutter_news_example/packages/article_repository/test/src/article_repository_test.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/article_repository/test/src/article_repository_test.dart",
"repo_id": "news_toolkit",
"token_count": 4678
} | 935 |
import 'package:authentication_client/authentication_client.dart';
import 'package:firebase_auth/firebase_auth.dart' as firebase_auth;
import 'package:flutter_facebook_auth/flutter_facebook_auth.dart';
import 'package:google_sign_in/google_sign_in.dart';
import 'package:sign_in_with_apple/sign_in_with_apple.dart';
import 'package:token_storage/token_storage.dart';
import 'package:twitter_login/twitter_login.dart';
/// Signature for [SignInWithApple.getAppleIDCredential].
///
/// Declared as a typedef so tests can inject a fake instead of invoking the
/// real native Sign in with Apple flow.
typedef GetAppleCredentials = Future<AuthorizationCredentialAppleID> Function({
  required List<AppleIDAuthorizationScopes> scopes,
  WebAuthenticationOptions webAuthenticationOptions,
  String nonce,
  String state,
});
/// {@template firebase_authentication_client}
/// A Firebase implementation of the [AuthenticationClient] interface.
/// {@endtemplate}
class FirebaseAuthenticationClient implements AuthenticationClient {
  /// {@macro firebase_authentication_client}
  ///
  /// All dependencies other than [tokenStorage] default to their platform
  /// singletons; the injectable parameters exist primarily for testing.
  FirebaseAuthenticationClient({
    required TokenStorage tokenStorage,
    firebase_auth.FirebaseAuth? firebaseAuth,
    GoogleSignIn? googleSignIn,
    GetAppleCredentials? getAppleCredentials,
    FacebookAuth? facebookAuth,
    TwitterLogin? twitterLogin,
  }) : _tokenStorage = tokenStorage,
       _firebaseAuth = firebaseAuth ?? firebase_auth.FirebaseAuth.instance,
       _googleSignIn = googleSignIn ?? GoogleSignIn.standard(),
       _getAppleCredentials =
           getAppleCredentials ?? SignInWithApple.getAppleIDCredential,
       _facebookAuth = facebookAuth ?? FacebookAuth.instance,
       _twitterLogin = twitterLogin ??
           // Twitter credentials are supplied at build time via
           // --dart-define environment values.
           TwitterLogin(
             apiKey: const String.fromEnvironment('TWITTER_API_KEY'),
             apiSecretKey: const String.fromEnvironment('TWITTER_API_SECRET'),
             redirectURI: const String.fromEnvironment('TWITTER_REDIRECT_URI'),
           ) {
    // Keep the persisted user token in sync with the auth state for the
    // lifetime of this client.
    user.listen(_onUserChanged);
  }
  final TokenStorage _tokenStorage;
  final firebase_auth.FirebaseAuth _firebaseAuth;
  final GoogleSignIn _googleSignIn;
  final GetAppleCredentials _getAppleCredentials;
  final FacebookAuth _facebookAuth;
  final TwitterLogin _twitterLogin;
  /// Stream of [AuthenticationUser] which will emit the current user when
  /// the authentication state changes.
  ///
  /// Emits [AuthenticationUser.anonymous] if the user is not authenticated.
  @override
  Stream<AuthenticationUser> get user {
    return _firebaseAuth.authStateChanges().map((firebaseUser) {
      return firebaseUser == null
          ? AuthenticationUser.anonymous
          : firebaseUser.toUser;
    });
  }
  /// Starts the Sign In with Apple Flow.
  ///
  /// Throws a [LogInWithAppleFailure] if an exception occurs.
  @override
  Future<void> logInWithApple() async {
    try {
      final appleIdCredential = await _getAppleCredentials(
        scopes: [
          AppleIDAuthorizationScopes.email,
          AppleIDAuthorizationScopes.fullName,
        ],
      );
      // Exchange the Apple credential for a Firebase credential.
      final oAuthProvider = firebase_auth.OAuthProvider('apple.com');
      final credential = oAuthProvider.credential(
        idToken: appleIdCredential.identityToken,
        accessToken: appleIdCredential.authorizationCode,
      );
      await _firebaseAuth.signInWithCredential(credential);
    } catch (error, stackTrace) {
      Error.throwWithStackTrace(LogInWithAppleFailure(error), stackTrace);
    }
  }
  /// Starts the Sign In with Google Flow.
  ///
  /// Throws a [LogInWithGoogleCanceled] if the flow is canceled by the user.
  /// Throws a [LogInWithGoogleFailure] if an exception occurs.
  @override
  Future<void> logInWithGoogle() async {
    try {
      // A null result means the user dismissed the account picker.
      final googleUser = await _googleSignIn.signIn();
      if (googleUser == null) {
        throw LogInWithGoogleCanceled(
          Exception('Sign in with Google canceled'),
        );
      }
      final googleAuth = await googleUser.authentication;
      final credential = firebase_auth.GoogleAuthProvider.credential(
        accessToken: googleAuth.accessToken,
        idToken: googleAuth.idToken,
      );
      await _firebaseAuth.signInWithCredential(credential);
    } on LogInWithGoogleCanceled {
      // Re-raise cancellations untouched so callers can distinguish them
      // from genuine failures.
      rethrow;
    } catch (error, stackTrace) {
      Error.throwWithStackTrace(LogInWithGoogleFailure(error), stackTrace);
    }
  }
  /// Starts the Sign In with Facebook Flow.
  ///
  /// Throws a [LogInWithFacebookCanceled] if the flow is canceled by the user.
  /// Throws a [LogInWithFacebookFailure] if an exception occurs.
  @override
  Future<void> logInWithFacebook() async {
    try {
      final loginResult = await _facebookAuth.login();
      if (loginResult.status == LoginStatus.cancelled) {
        throw LogInWithFacebookCanceled(
          Exception('Sign in with Facebook canceled'),
        );
      } else if (loginResult.status == LoginStatus.failed) {
        throw LogInWithFacebookFailure(
          Exception(loginResult.message),
        );
      }
      final accessToken = loginResult.accessToken?.token;
      if (accessToken == null) {
        throw LogInWithFacebookFailure(
          Exception(
            'Sign in with Facebook failed due to an empty access token',
          ),
        );
      }
      final credential =
          firebase_auth.FacebookAuthProvider.credential(accessToken);
      await _firebaseAuth.signInWithCredential(credential);
    } on LogInWithFacebookCanceled {
      rethrow;
    } catch (error, stackTrace) {
      Error.throwWithStackTrace(LogInWithFacebookFailure(error), stackTrace);
    }
  }
  /// Starts the Sign In with Twitter Flow.
  ///
  /// Throws a [LogInWithTwitterCanceled] if the flow is canceled by the user.
  /// Throws a [LogInWithTwitterFailure] if an exception occurs.
  @override
  Future<void> logInWithTwitter() async {
    try {
      final loginResult = await _twitterLogin.loginV2();
      if (loginResult.status == TwitterLoginStatus.cancelledByUser) {
        throw LogInWithTwitterCanceled(
          Exception('Sign in with Twitter canceled'),
        );
      } else if (loginResult.status == TwitterLoginStatus.error) {
        throw LogInWithTwitterFailure(
          Exception(loginResult.errorMessage),
        );
      }
      final authToken = loginResult.authToken;
      final authTokenSecret = loginResult.authTokenSecret;
      if (authToken == null || authTokenSecret == null) {
        throw LogInWithTwitterFailure(
          Exception(
            'Sign in with Twitter failed due to invalid auth token or secret',
          ),
        );
      }
      final credential = firebase_auth.TwitterAuthProvider.credential(
        accessToken: authToken,
        secret: authTokenSecret,
      );
      await _firebaseAuth.signInWithCredential(credential);
    } on LogInWithTwitterCanceled {
      rethrow;
    } catch (error, stackTrace) {
      Error.throwWithStackTrace(LogInWithTwitterFailure(error), stackTrace);
    }
  }
  /// Sends an authentication link to the provided [email].
  ///
  /// Opening the link redirects to the app with [appPackageName]
  /// using Firebase Dynamic Links and authenticates the user
  /// based on the provided email link.
  ///
  /// Throws a [SendLoginEmailLinkFailure] if an exception occurs.
  @override
  Future<void> sendLoginEmailLink({
    required String email,
    required String appPackageName,
  }) async {
    try {
      // The deep-link host/path are supplied at build time via --dart-define;
      // the email is round-tripped as a query parameter.
      final redirectUrl = Uri.https(
        const String.fromEnvironment('FLAVOR_DEEP_LINK_DOMAIN'),
        const String.fromEnvironment('FLAVOR_DEEP_LINK_PATH'),
        <String, String>{'email': email},
      );
      final actionCodeSettings = firebase_auth.ActionCodeSettings(
        url: redirectUrl.toString(),
        handleCodeInApp: true,
        iOSBundleId: appPackageName,
        androidPackageName: appPackageName,
        androidInstallApp: true,
      );
      await _firebaseAuth.sendSignInLinkToEmail(
        email: email,
        actionCodeSettings: actionCodeSettings,
      );
    } catch (error, stackTrace) {
      Error.throwWithStackTrace(SendLoginEmailLinkFailure(error), stackTrace);
    }
  }
  /// Checks if an incoming [emailLink] is a sign-in with email link.
  ///
  /// Throws a [IsLogInWithEmailLinkFailure] if an exception occurs.
  @override
  bool isLogInWithEmailLink({required String emailLink}) {
    try {
      return _firebaseAuth.isSignInWithEmailLink(emailLink);
    } catch (error, stackTrace) {
      Error.throwWithStackTrace(IsLogInWithEmailLinkFailure(error), stackTrace);
    }
  }
  /// Signs in with the provided [emailLink].
  ///
  /// Throws a [LogInWithEmailLinkFailure] if an exception occurs.
  @override
  Future<void> logInWithEmailLink({
    required String email,
    required String emailLink,
  }) async {
    try {
      await _firebaseAuth.signInWithEmailLink(
        email: email,
        emailLink: emailLink,
      );
    } catch (error, stackTrace) {
      Error.throwWithStackTrace(LogInWithEmailLinkFailure(error), stackTrace);
    }
  }
  /// Signs out the current user which will emit
  /// [AuthenticationUser.anonymous] from the [user] Stream.
  ///
  /// Throws a [LogOutFailure] if an exception occurs.
  @override
  Future<void> logOut() async {
    try {
      // NOTE(review): only the Firebase and Google sessions are cleared
      // here; Facebook/Twitter SDK sessions are left intact — confirm this
      // is intentional.
      await Future.wait([
        _firebaseAuth.signOut(),
        _googleSignIn.signOut(),
      ]);
    } catch (error, stackTrace) {
      Error.throwWithStackTrace(LogOutFailure(error), stackTrace);
    }
  }
  /// Updates the user token in [TokenStorage] if the user is authenticated.
  Future<void> _onUserChanged(AuthenticationUser user) async {
    if (!user.isAnonymous) {
      await _tokenStorage.saveToken(user.id);
    } else {
      await _tokenStorage.clearToken();
    }
  }
}
extension on firebase_auth.User {
  /// Maps the Firebase user to the domain [AuthenticationUser] model.
  AuthenticationUser get toUser {
    return AuthenticationUser(
      id: uid,
      email: email,
      name: displayName,
      photo: photoURL,
      // Heuristic: equal creation and last-sign-in timestamps are treated
      // as a freshly created account — assumes both metadata fields are
      // populated by Firebase; verify for anonymous/linked accounts.
      isNewUser: metadata.creationTime == metadata.lastSignInTime,
    );
  }
}
| news_toolkit/flutter_news_example/packages/authentication_client/firebase_authentication_client/lib/src/firebase_authentication_client.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/authentication_client/firebase_authentication_client/lib/src/firebase_authentication_client.dart",
"repo_id": "news_toolkit",
"token_count": 3605
} | 936 |
# email_launcher
[![style: very good analysis][very_good_analysis_badge]][very_good_analysis_link]
[![License: MIT][license_badge]][license_link]
A package to open an external email app on Android and iOS.
[license_badge]: https://img.shields.io/badge/license-MIT-blue.svg
[license_link]: https://opensource.org/licenses/MIT
[very_good_analysis_badge]: https://img.shields.io/badge/style-very_good_analysis-B22C89.svg
[very_good_analysis_link]: https://pub.dev/packages/very_good_analysis | news_toolkit/flutter_news_example/packages/email_launcher/README.md/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/email_launcher/README.md",
"repo_id": "news_toolkit",
"token_count": 173
} | 937 |
# in_app_purchase_repository
[![style: very good analysis][very_good_analysis_badge]][very_good_analysis_link]
[![License: MIT][license_badge]][license_link]
A repository that manages user in-app purchases.
[license_badge]: https://img.shields.io/badge/license-MIT-blue.svg
[license_link]: https://opensource.org/licenses/MIT
[very_good_analysis_badge]: https://img.shields.io/badge/style-very_good_analysis-B22C89.svg
[very_good_analysis_link]: https://pub.dev/packages/very_good_analysis
| news_toolkit/flutter_news_example/packages/in_app_purchase_repository/README.md/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/in_app_purchase_repository/README.md",
"repo_id": "news_toolkit",
"token_count": 177
} | 938 |
import 'package:flutter/material.dart' hide ProgressIndicator;
import 'package:news_blocks/news_blocks.dart';
import 'package:news_blocks_ui/src/widgets/widgets.dart';
/// {@template banner_ad}
/// A reusable banner ad block widget.
/// {@endtemplate}
class BannerAd extends StatelessWidget {
  /// {@macro banner_ad}
  const BannerAd({
    required this.block,
    required this.adFailedToLoadTitle,
    super.key,
  });

  /// The associated [BannerAdBlock] instance.
  final BannerAdBlock block;

  /// The title displayed when this ad fails to load.
  final String adFailedToLoadTitle;

  @override
  Widget build(BuildContext context) {
    // The container and its content share the block's configured ad size.
    final adContent = BannerAdContent(
      size: block.size,
      adFailedToLoadTitle: adFailedToLoadTitle,
    );
    return BannerAdContainer(size: block.size, child: adContent);
  }
}
| news_toolkit/flutter_news_example/packages/news_blocks_ui/lib/src/banner_ad.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/news_blocks_ui/lib/src/banner_ad.dart",
"repo_id": "news_toolkit",
"token_count": 289
} | 939 |
import 'package:flutter/material.dart';
import 'package:news_blocks/news_blocks.dart';
import 'package:news_blocks_ui/news_blocks_ui.dart';
/// {@template post_medium}
/// A reusable post medium block widget.
/// {@endtemplate}
class PostMedium extends StatelessWidget {
  /// {@macro post_medium}
  const PostMedium({required this.block, this.onPressed, super.key});

  /// The associated [PostMediumBlock] instance.
  final PostMediumBlock block;

  /// An optional callback which is invoked when the action is triggered.
  /// A [Uri] from the associated [BlockAction] is provided to the callback.
  final BlockActionCallback? onPressed;

  @override
  Widget build(BuildContext context) {
    // Choose between the overlaid and description layouts up-front so the
    // gesture wiring below stays uncluttered.
    final Widget layout;
    if (block.isContentOverlaid) {
      layout = PostMediumOverlaidLayout(
        title: block.title,
        imageUrl: block.imageUrl!,
      );
    } else {
      layout = PostMediumDescriptionLayout(
        title: block.title,
        imageUrl: block.imageUrl!,
        description: block.description,
        publishedAt: block.publishedAt,
        author: block.author,
      );
    }
    return GestureDetector(
      onTap: () {
        // Taps are forwarded only when the block carries a navigation action.
        if (block.hasNavigationAction) onPressed?.call(block.action!);
      },
      child: layout,
    );
  }
}
| news_toolkit/flutter_news_example/packages/news_blocks_ui/lib/src/post_medium/post_medium.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/news_blocks_ui/lib/src/post_medium/post_medium.dart",
"repo_id": "news_toolkit",
"token_count": 482
} | 940 |
/// {@template ads_retry_policy}
/// A retry policy for ads.
/// {@endtemplate}
class AdsRetryPolicy {
  /// {@macro ads_retry_policy}
  const AdsRetryPolicy({
    this.maxRetryCount = 3,
    this.retryIntervals = const [
      Duration(seconds: 1),
      Duration(seconds: 2),
      Duration(seconds: 4),
    ],
  });

  /// The maximum number of retries to load an ad.
  final int maxRetryCount;

  /// The interval between retries to load an ad.
  final List<Duration> retryIntervals;

  /// Returns the interval for the given retry.
  ///
  /// Returns [Duration.zero] when [retry] is out of range (non-positive or
  /// greater than [maxRetryCount]). When [retry] is in range but exceeds
  /// the number of configured [retryIntervals] (e.g. `maxRetryCount: 5`
  /// with the default three intervals), the last configured interval is
  /// reused instead of throwing a [RangeError].
  Duration getIntervalForRetry(int retry) {
    if (retry <= 0 || retry > maxRetryCount) return Duration.zero;
    if (retryIntervals.isEmpty) return Duration.zero;
    if (retry > retryIntervals.length) return retryIntervals.last;
    return retryIntervals[retry - 1];
  }
}
| news_toolkit/flutter_news_example/packages/news_blocks_ui/lib/src/widgets/ads_retry_policy.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/news_blocks_ui/lib/src/widgets/ads_retry_policy.dart",
"repo_id": "news_toolkit",
"token_count": 246
} | 941 |
import 'dart:async';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:video_player_platform_interface/video_player_platform_interface.dart';
/// In-memory [VideoPlayerPlatform] test double.
///
/// Records every platform-method name in [calls] and every created
/// [DataSource] in [dataSources]. Set [forceInitError] before `create` is
/// invoked to simulate a platform-side initialization failure.
class FakeVideoPlayerPlatform extends VideoPlayerPlatform {
  // Completes with `true` once init() has been called.
  Completer<bool> initialized = Completer<bool>();
  List<String> calls = <String>[];
  List<DataSource> dataSources = <DataSource>[];
  // One event stream per created texture id.
  final Map<int, StreamController<VideoEvent>> streams =
      <int, StreamController<VideoEvent>>{};
  bool forceInitError = false;
  int nextTextureId = 0;
  // Last position seeked to, keyed by texture id.
  final Map<int, Duration> _positions = <int, Duration>{};
  @override
  Future<int?> create(DataSource dataSource) async {
    calls.add('create');
    final stream = StreamController<VideoEvent>();
    streams[nextTextureId] = stream;
    // Emit either a platform error or a synthetic "initialized" event so
    // widgets under test can progress past their loading state.
    if (forceInitError) {
      stream.addError(
        PlatformException(
          code: 'VideoError',
          message: 'Video player had error XYZ',
        ),
      );
    } else {
      stream.add(
        VideoEvent(
          eventType: VideoEventType.initialized,
          size: const Size(100, 100),
          duration: const Duration(seconds: 1),
        ),
      );
    }
    dataSources.add(dataSource);
    return nextTextureId++;
  }
  @override
  Future<void> dispose(int textureId) async {
    calls.add('dispose');
  }
  @override
  Future<void> init() async {
    calls.add('init');
    initialized.complete(true);
  }
  @override
  Stream<VideoEvent> videoEventsFor(int textureId) {
    return streams[textureId]!.stream;
  }
  @override
  Future<void> pause(int textureId) async {
    calls.add('pause');
  }
  @override
  Future<void> play(int textureId) async {
    calls.add('play');
  }
  @override
  Future<Duration> getPosition(int textureId) async {
    calls.add('position');
    return _positions[textureId] ?? Duration.zero;
  }
  @override
  Future<void> seekTo(int textureId, Duration position) async {
    calls.add('seekTo');
    _positions[textureId] = position;
  }
  @override
  Future<void> setLooping(int textureId, bool looping) async {
    calls.add('setLooping');
  }
  @override
  Future<void> setVolume(int textureId, double volume) async {
    calls.add('setVolume');
  }
  @override
  Future<void> setPlaybackSpeed(int textureId, double speed) async {
    calls.add('setPlaybackSpeed');
  }
  @override
  Future<void> setMixWithOthers(bool mixWithOthers) async {
    calls.add('setMixWithOthers');
  }
  @override
  Widget buildView(int textureId) {
    return Texture(textureId: textureId);
  }
}
"file_path": "news_toolkit/flutter_news_example/packages/news_blocks_ui/test/helpers/fake_video_player_platform.dart",
"repo_id": "news_toolkit",
"token_count": 924
} | 942 |
// ignore_for_file: unnecessary_const, prefer_const_constructors
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:mocktail_image_network/mocktail_image_network.dart';
import 'package:news_blocks/news_blocks.dart';
import 'package:news_blocks_ui/news_blocks_ui.dart';
import '../helpers/helpers.dart';
void main() {
  // Widget tests for PostGrid: one large leading tile followed by medium
  // tiles, rendered inside a CustomScrollView sliver.
  group('PostGrid', () {
    final postGridTileBlock = PostGridTileBlock(
      id: '842e3193-86d2-4069-a7e6-f769faa6f970',
      category: PostCategory.science,
      author: 'SciTechDaily',
      publishedAt: DateTime(2022, 5, 5),
      imageUrl:
          'https://scitechdaily.com/images/Qubit-Platform-Single-Electron-on-Solid-Neon.jpg',
      title: 'The Quest for an Ideal Quantum Bit: New Qubit Breakthrough Could '
          'Revolutionize Quantum Computing',
      action: NavigateToArticleAction(
        articleId: '842e3193-86d2-4069-a7e6-f769faa6f970',
      ),
    );
    testWidgets('renders correctly 5 PostGridTiles', (tester) async {
      final gridGroupBlock = PostGridGroupBlock(
        category: PostCategory.science,
        tiles: List.generate(5, (index) => postGridTileBlock),
      );
      // mockNetworkImages prevents real HTTP image fetches during the test.
      await mockNetworkImages(
        () async => tester.pumpContentThemedApp(
          CustomScrollView(
            slivers: [
              PostGrid(
                gridGroupBlock: gridGroupBlock,
                premiumText: 'Premium',
              ),
            ],
          ),
        ),
      );
      // First tile renders large; the remaining four render medium.
      expect(find.byType(PostLarge), findsOneWidget);
      await tester.ensureVisible(find.byType(PostMedium).last);
      await tester.pumpAndSettle();
      expect(find.byType(PostMedium), findsNWidgets(4));
    });
    testWidgets('renders correctly 1 PostGridTile', (tester) async {
      final gridGroupBlock = PostGridGroupBlock(
        category: PostCategory.science,
        tiles: [postGridTileBlock],
      );
      await mockNetworkImages(
        () async => tester.pumpContentThemedApp(
          CustomScrollView(
            slivers: [
              PostGrid(gridGroupBlock: gridGroupBlock, premiumText: 'Premium'),
            ],
          ),
        ),
      );
      // A single tile renders only the large layout.
      expect(find.byType(PostGrid), findsOneWidget);
      expect(find.byType(PostLarge), findsOneWidget);
      expect(find.byType(PostMedium), findsNothing);
    });
    testWidgets('handles empty tiles list', (tester) async {
      final gridGroupBlock = PostGridGroupBlock(
        category: PostCategory.science,
        tiles: [],
      );
      await mockNetworkImages(
        () async => tester.pumpContentThemedApp(
          CustomScrollView(
            slivers: [
              PostGrid(gridGroupBlock: gridGroupBlock, premiumText: 'Premium'),
            ],
          ),
        ),
      );
      // An empty group renders nothing at all.
      expect(find.byType(SliverToBoxAdapter), findsNothing);
      expect(find.byType(PostLarge), findsNothing);
      expect(find.byType(PostMedium), findsNothing);
    });
  });
}
| news_toolkit/flutter_news_example/packages/news_blocks_ui/test/src/post_grid_test.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/news_blocks_ui/test/src/post_grid_test.dart",
"repo_id": "news_toolkit",
"token_count": 1297
} | 943 |
// ignore_for_file: unnecessary_const, prefer_const_constructors
import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:mocktail_image_network/mocktail_image_network.dart';
import 'package:news_blocks/news_blocks.dart';
import 'package:news_blocks_ui/news_blocks_ui.dart';
import 'package:news_blocks_ui/src/widgets/widgets.dart';
import '../helpers/helpers.dart';
void main() {
  const imageUrl =
      'https://cdn.vox-cdn.com/thumbor/OTpmptgr7XcTVAJ27UBvIxl0vrg='
      '/0x146:2040x1214/fit-in/1200x630/cdn.vox-cdn.com/uploads/chorus_asset'
      '/file/22049166/shollister_201117_4303_0003.0.jpg';
  // Widget tests for SlideshowIntroduction: title, category badge, cover
  // image rendering, and tap-to-action forwarding.
  group('SlideshowIntroduction', () {
    testWidgets('renders title', (tester) async {
      final block = SlideshowIntroductionBlock(
        title: 'title',
        coverImageUrl: imageUrl,
      );
      // mockNetworkImages prevents real HTTP image fetches during the test.
      await mockNetworkImages(
        () async => tester.pumpContentThemedApp(
          SingleChildScrollView(
            child: Column(
              children: [
                SlideshowIntroduction(
                  block: block,
                  slideshowText: 'slideshowText',
                ),
              ],
            ),
          ),
        ),
      );
      expect(find.text(block.title), findsOneWidget);
    });
    testWidgets('renders SlideshowCategory', (tester) async {
      final block = SlideshowIntroductionBlock(
        title: 'title',
        coverImageUrl: imageUrl,
      );
      await mockNetworkImages(
        () async => tester.pumpContentThemedApp(
          SingleChildScrollView(
            child: Column(
              children: [
                SlideshowIntroduction(
                  block: block,
                  slideshowText: 'slideshowText',
                ),
              ],
            ),
          ),
        ),
      );
      // The introduction variant of the category badge must be used.
      expect(
        find.byWidgetPredicate(
          (widget) =>
              widget is SlideshowCategory && widget.isIntroduction == true,
        ),
        findsOneWidget,
      );
    });
    testWidgets('renders cover image', (tester) async {
      final block = SlideshowIntroductionBlock(
        title: 'title',
        coverImageUrl: imageUrl,
      );
      await mockNetworkImages(
        () async => tester.pumpContentThemedApp(
          SingleChildScrollView(
            child: Column(
              children: [
                SlideshowIntroduction(
                  block: block,
                  slideshowText: 'slideshowText',
                ),
              ],
            ),
          ),
        ),
      );
      // The cover image is rendered through PostLargeImage with the block's
      // configured URL.
      expect(
        find.byWidgetPredicate(
          (widget) => widget is PostLargeImage && widget.imageUrl == imageUrl,
        ),
        findsOneWidget,
      );
    });
    testWidgets(
      'onPressed is called with action when tapped',
      (tester) async {
        final action = NavigateToArticleAction(articleId: 'articleId');
        // Collect forwarded actions to assert the exact payload.
        final actions = <BlockAction>[];
        final block = SlideshowIntroductionBlock(
          title: 'title',
          coverImageUrl: imageUrl,
          action: action,
        );
        await mockNetworkImages(
          () async => tester.pumpContentThemedApp(
            SingleChildScrollView(
              child: Column(
                children: [
                  SlideshowIntroduction(
                    block: block,
                    slideshowText: 'slideshowText',
                    onPressed: actions.add,
                  ),
                ],
              ),
            ),
          ),
        );
        await tester.ensureVisible(find.byType(SlideshowIntroduction));
        await tester.tap(find.byType(SlideshowIntroduction));
        await tester.pump();
        expect(actions, equals([action]));
      },
    );
  });
}
| news_toolkit/flutter_news_example/packages/news_blocks_ui/test/src/slideshow_introduction_test.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/news_blocks_ui/test/src/slideshow_introduction_test.dart",
"repo_id": "news_toolkit",
"token_count": 1878
} | 944 |
// ignore_for_file: prefer_const_constructors
import 'package:flutter/material.dart' hide ProgressIndicator;
import 'package:flutter_test/flutter_test.dart';
import 'package:news_blocks_ui/src/widgets/widgets.dart';
import 'package:video_player/video_player.dart';
import 'package:video_player_platform_interface/video_player_platform_interface.dart';
import '../../helpers/helpers.dart';
void main() {
  // Widget tests for InlineVideo: loading indicator, initialized rendering,
  // tap-to-play/pause toggling, and controller-builder wiring.
  group('InlineVideo', () {
    setUp(() {
      // Replace the platform implementation so no real video backend is
      // required in tests.
      VideoPlayerPlatform.instance = FakeVideoPlayerPlatform();
    });

    testWidgets('renders progressIndicator when loading', (tester) async {
      const progressIndicatorKey = Key('__progress_indicator__');
      final controller = FakeVideoPlayerController();
      await tester.pumpApp(
        InlineVideo(
          videoUrl: 'videoUrl',
          progressIndicator: ProgressIndicator(key: progressIndicatorKey),
          videoPlayerControllerBuilder: (_) => controller,
        ),
      );
      expect(find.byKey(progressIndicatorKey), findsOneWidget);
    });

    // Note: this test previously appeared twice verbatim; the duplicate
    // has been removed.
    testWidgets('renders VideoPlayer when initialized', (tester) async {
      final controller = FakeVideoPlayerController();
      controller.value = controller.value.copyWith(
        isInitialized: true,
        size: Size(100, 100),
      );
      await tester.pumpApp(
        InlineVideo(
          videoUrl: 'videoUrl',
          progressIndicator: ProgressIndicator(),
          videoPlayerControllerBuilder: (_) => controller,
        ),
      );
      expect(find.byType(VideoPlayer), findsOneWidget);
    });

    testWidgets(
        'plays video when tapped '
        'and video is not playing', (tester) async {
      final controller = FakeVideoPlayerController();
      controller.value = controller.value.copyWith(
        isInitialized: true,
        size: Size(100, 100),
      );
      await tester.pumpApp(
        InlineVideo(
          videoUrl: 'videoUrl',
          progressIndicator: ProgressIndicator(),
          videoPlayerControllerBuilder: (_) => controller,
        ),
      );
      await tester.tap(find.byKey(Key('inlineVideo_gestureDetector')));
      await tester.pump();
      expect(controller.playCalled, equals(1));
      expect(controller.pauseCalled, equals(0));
    });

    testWidgets(
        'pauses video when tapped '
        'and video is playing', (tester) async {
      final controller = FakeVideoPlayerController();
      controller.value = controller.value.copyWith(
        isInitialized: true,
        size: Size(100, 100),
      );
      await tester.pumpApp(
        InlineVideo(
          videoUrl: 'videoUrl',
          progressIndicator: ProgressIndicator(),
          videoPlayerControllerBuilder: (_) => controller,
        ),
      );
      // Mark the controller as actively playing before tapping.
      controller
        ..textureId = 123
        ..value = controller.value.copyWith(isPlaying: true);
      await tester.pump();
      await tester.tap(find.byKey(Key('inlineVideo_gestureDetector')));
      await tester.pump();
      expect(controller.playCalled, equals(0));
      expect(controller.pauseCalled, equals(1));
    });

    testWidgets('builds VideoPlayerController with videoUrl', (tester) async {
      const videoUrl = 'videoUrl';
      late String capturedVideoUrl;
      await tester.pumpApp(
        InlineVideo(
          videoUrl: videoUrl,
          progressIndicator: ProgressIndicator(),
          videoPlayerControllerBuilder: (url) {
            capturedVideoUrl = url.toString();
            return FakeVideoPlayerController();
          },
        ),
      );
      expect(capturedVideoUrl, equals(videoUrl));
    });
  });
}
/// A fake [VideoPlayerController] for driving the `InlineVideo` widget tests
/// without a real platform video player.
///
/// Exposes [playCalled] and [pauseCalled] counters so tests can assert how
/// playback was toggled by widget interactions.
class FakeVideoPlayerController extends ValueNotifier<VideoPlayerValue>
    implements VideoPlayerController {
  FakeVideoPlayerController()
      : super(VideoPlayerValue(duration: Duration.zero));

  /// Number of times [play] was invoked.
  int playCalled = 0;

  /// Number of times [pause] was invoked.
  int pauseCalled = 0;

  @override
  Future<void> dispose() async {
    super.dispose();
  }

  // Tests assign a real texture id to simulate an initialized player.
  @override
  int textureId = VideoPlayerController.kUninitializedTextureId;

  // The remaining members are minimal stand-ins that satisfy the
  // [VideoPlayerController] interface; they return inert defaults and
  // perform no real work.

  @override
  String get dataSource => '';

  @override
  Map<String, String> get httpHeaders => <String, String>{};

  @override
  DataSourceType get dataSourceType => DataSourceType.file;

  @override
  String get package => '';

  @override
  Future<Duration> get position async => value.position;

  @override
  Future<void> seekTo(Duration moment) async {}

  @override
  Future<void> setVolume(double volume) async {}

  @override
  Future<void> setPlaybackSpeed(double speed) async {}

  @override
  Future<void> initialize() async {}

  @override
  Future<void> pause() async => pauseCalled++;

  @override
  Future<void> play() async => playCalled++;

  @override
  Future<void> setLooping(bool looping) async {}

  @override
  VideoFormat? get formatHint => null;

  @override
  Future<ClosedCaptionFile> get closedCaptionFile => _loadClosedCaption();

  @override
  VideoPlayerOptions? get videoPlayerOptions => null;

  @override
  void setCaptionOffset(Duration delay) {}

  @override
  Future<void> setClosedCaptionFile(
    Future<ClosedCaptionFile>? closedCaptionFile,
  ) async {}
}
/// Supplies the fixed closed-caption file used by [FakeVideoPlayerController].
Future<ClosedCaptionFile> _loadClosedCaption() async {
  return _FakeClosedCaptionFile();
}
/// A [ClosedCaptionFile] stub exposing two fixed captions ('one' and 'two').
class _FakeClosedCaptionFile extends ClosedCaptionFile {
  @override
  List<Caption> get captions => <Caption>[
        const Caption(
          number: 0,
          text: 'one',
          start: Duration(milliseconds: 100),
          end: Duration(milliseconds: 200),
        ),
        const Caption(
          number: 1,
          text: 'two',
          start: Duration(milliseconds: 300),
          end: Duration(milliseconds: 400),
        ),
      ];
}
| news_toolkit/flutter_news_example/packages/news_blocks_ui/test/src/widgets/inline_video_test.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/news_blocks_ui/test/src/widgets/inline_video_test.dart",
"repo_id": "news_toolkit",
"token_count": 2382
} | 945 |
# news_repository
[![style: very good analysis][very_good_analysis_badge]][very_good_analysis_link]
[![License: MIT][license_badge]][license_link]
A repository that manages content feed data.
[license_badge]: https://img.shields.io/badge/license-MIT-blue.svg
[license_link]: https://opensource.org/licenses/MIT
[very_good_analysis_badge]: https://img.shields.io/badge/style-very_good_analysis-B22C89.svg
[very_good_analysis_link]: https://pub.dev/packages/very_good_analysis | news_toolkit/flutter_news_example/packages/news_repository/README.md/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/news_repository/README.md",
"repo_id": "news_toolkit",
"token_count": 169
} | 946 |
export 'src/notifications_client.dart';
| news_toolkit/flutter_news_example/packages/notifications_client/notifications_client/lib/notifications_client.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/notifications_client/notifications_client/lib/notifications_client.dart",
"repo_id": "news_toolkit",
"token_count": 13
} | 947 |
part of 'notifications_repository.dart';
/// Storage keys for the [NotificationsStorage].
/// Storage keys for the [NotificationsStorage].
///
/// Keys are wrapped in double underscores to minimize the chance of
/// collisions with other values persisted in the shared [Storage].
abstract class NotificationsStorageKeys {
  /// Whether the notifications are enabled.
  static const notificationsEnabled = '__notifications_enabled_storage_key__';

  /// The list of user's categories preferences.
  static const categoriesPreferences = '__categories_preferences_storage_key__';
}
/// {@template notifications_storage}
/// Storage for the [NotificationsRepository].
/// {@endtemplate}
/// {@template notifications_storage}
/// Storage for the [NotificationsRepository].
/// {@endtemplate}
class NotificationsStorage {
  /// {@macro notifications_storage}
  const NotificationsStorage({
    required Storage storage,
  }) : _storage = storage;

  final Storage _storage;

  /// Persists whether notifications are enabled to Storage.
  Future<void> setNotificationsEnabled({required bool enabled}) async {
    await _storage.write(
      key: NotificationsStorageKeys.notificationsEnabled,
      value: '$enabled',
    );
  }

  /// Reads the notifications-enabled flag from Storage.
  ///
  /// Returns `false` when no value has been stored yet.
  Future<bool> fetchNotificationsEnabled() async {
    final stored = await _storage.read(
      key: NotificationsStorageKeys.notificationsEnabled,
    );
    if (stored == null) return false;
    return stored.parseBool();
  }

  /// Persists the user's category preferences to Storage as a JSON-encoded
  /// list of category names.
  Future<void> setCategoriesPreferences({
    required Set<Category> categories,
  }) async {
    final encoded = json.encode(
      [for (final category in categories) category.name],
    );
    await _storage.write(
      key: NotificationsStorageKeys.categoriesPreferences,
      value: encoded,
    );
  }

  /// Reads the user's category preferences from Storage.
  ///
  /// Returns `null` when no preferences have been stored yet.
  Future<Set<Category>?> fetchCategoriesPreferences() async {
    final encoded = await _storage.read(
      key: NotificationsStorageKeys.categoriesPreferences,
    );
    if (encoded == null) {
      return null;
    }
    final names = json.decode(encoded) as List;
    return names.map((name) => Category.fromString(name as String)).toSet();
  }
}
/// Parsing helpers for boolean values persisted as strings.
extension _BoolFromStringParsing on String {
  /// Whether this string equals `'true'`, ignoring case.
  ///
  /// Any other content (including the empty string) parses as `false`.
  bool parseBool() {
    final normalized = toLowerCase();
    return normalized == 'true';
  }
}
| news_toolkit/flutter_news_example/packages/notifications_repository/lib/src/notifications_storage.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/notifications_repository/lib/src/notifications_storage.dart",
"repo_id": "news_toolkit",
"token_count": 649
} | 948 |
name: permission_client
description: A client that handles requesting permissions on a device.
version: 1.0.0+1
publish_to: none
environment:
sdk: ">=3.0.0 <4.0.0"
dependencies:
flutter:
sdk: flutter
permission_handler: ^11.0.0
dev_dependencies:
flutter_test:
sdk: flutter
mocktail: ^1.0.2
very_good_analysis: ^5.1.0
| news_toolkit/flutter_news_example/packages/permission_client/pubspec.yaml/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/permission_client/pubspec.yaml",
"repo_id": "news_toolkit",
"token_count": 139
} | 949 |
// ignore_for_file: prefer_const_constructors
import 'package:flutter_test/flutter_test.dart';
import 'package:mocktail/mocktail.dart';
import 'package:plugin_platform_interface/plugin_platform_interface.dart';
import 'package:share_launcher/share_launcher.dart';
import 'package:share_plus_platform_interface/share_plus_platform_interface.dart';
/// A mocktail mock of [SharePlatform].
///
/// Mixes in [MockPlatformInterfaceMixin] so the mock passes the
/// plugin_platform_interface verification when installed as
/// [SharePlatform.instance].
class MockSharePlatform extends Mock
    with MockPlatformInterfaceMixin
    implements SharePlatform {}
void main() {
  group('ShareFailure', () {
    // ShareFailure supports value equality: two failures constructed from
    // the same error compare equal.
    test('supports value comparison', () {
      final shareFailure1 = ShareFailure('error');
      final shareFailure2 = ShareFailure('error');
      expect(shareFailure1, equals(shareFailure2));
    });
  });

  group('ShareLauncher', () {
    // Required so the default share implementation, which is backed by a
    // platform interface, can be exercised in the test environment.
    TestWidgetsFlutterBinding.ensureInitialized();

    test('calls shareProvider with text', () async {
      var called = false;
      final shareLauncher = ShareLauncher(
        shareProvider: (String text) async {
          called = true;
          expect(text, equals('text'));
        },
      );
      await shareLauncher.share(text: 'text');
      expect(called, isTrue);
    });

    // Any error thrown by the provider must surface as a ShareFailure.
    test('throws ShareFailure when shareLauncher throws', () async {
      final shareLauncher = ShareLauncher(
        shareProvider: (String text) => throw Exception(),
      );
      expect(shareLauncher.share(text: 'text'), throwsA(isA<ShareFailure>()));
    });

    // When no provider is injected, sharing goes through SharePlatform,
    // which is stubbed here so the call can be observed.
    test(
        'calls default ShareProvider with text '
        'when shareProvider not provided ', () async {
      var called = false;
      SharePlatform.instance = MockSharePlatform();
      when(() => SharePlatform.instance.share(any(that: isA<String>())))
          .thenAnswer((_) async => called = true);
      await ShareLauncher().share(text: 'text');
      expect(called, isTrue);
    });
  });
}
| news_toolkit/flutter_news_example/packages/share_launcher/test/src/share_launcher_test.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/share_launcher/test/src/share_launcher_test.dart",
"repo_id": "news_toolkit",
"token_count": 646
} | 950 |
include: package:very_good_analysis/analysis_options.5.1.0.yaml
| news_toolkit/flutter_news_example/packages/storage/storage/analysis_options.yaml/0 | {
"file_path": "news_toolkit/flutter_news_example/packages/storage/storage/analysis_options.yaml",
"repo_id": "news_toolkit",
"token_count": 23
} | 951 |
name: flutter_news_example
version: 0.0.1+1
publish_to: none
environment:
sdk: ">=3.0.0 <4.0.0"
flutter: ">=3.7.6"
dependencies:
ads_consent_client:
path: packages/ads_consent_client
analytics_repository:
path: packages/analytics_repository
app_ui:
path: packages/app_ui
article_repository:
path: packages/article_repository
authentication_client:
path: packages/authentication_client/authentication_client
bloc: ^8.1.0
bloc_concurrency: ^0.2.0
clock: ^1.1.0
collection: ^1.16.0
deep_link_client:
path: packages/deep_link_client
email_launcher:
path: packages/email_launcher
equatable: ^2.0.3
firebase_analytics: ^10.0.3
firebase_auth_platform_interface: ^7.0.9
firebase_authentication_client:
path: packages/authentication_client/firebase_authentication_client
firebase_core: ^2.24.2
firebase_crashlytics: ^3.0.3
firebase_dynamic_links: ^5.0.3
firebase_messaging: ^14.0.3
firebase_notifications_client:
path: packages/notifications_client/firebase_notifications_client
flow_builder: ^0.0.7
flutter:
sdk: flutter
flutter_bloc: ^8.0.1
flutter_localizations:
sdk: flutter
flutter_news_example_api:
path: api
flutter_svg: ^2.0.5
font_awesome_flutter: ^10.1.0
form_inputs:
path: packages/form_inputs
google_mobile_ads: ^4.0.0
hydrated_bloc: ^9.0.0
in_app_purchase_repository:
path: packages/in_app_purchase_repository
intl: ^0.19.0
json_annotation: ^4.7.0
mockingjay: ^0.4.0
news_blocks:
path: api/packages/news_blocks
news_blocks_ui:
path: packages/news_blocks_ui
news_repository:
path: packages/news_repository
notifications_repository:
path: packages/notifications_repository
package_info_client:
path: packages/package_info_client
path_provider: ^2.0.2
permission_client:
path: packages/permission_client
persistent_storage:
path: packages/storage/persistent_storage
platform: ^3.0.2
purchase_client:
path: packages/purchase_client
share_launcher:
path: packages/share_launcher
shared_preferences: ^2.0.15
sliver_tools: ^0.2.9
stream_transform: ^2.0.0
test: ^1.21.4
token_storage:
path: packages/authentication_client/token_storage
url_launcher: ^6.0.9
user_repository:
path: packages/user_repository
very_good_analysis: ^5.1.0
video_player_platform_interface: ^6.0.1
visibility_detector: ^0.4.0+2
dev_dependencies:
bloc_test: ^9.0.3
build_runner: ^2.0.3
build_verify: ^3.0.0
build_version: ^2.0.3
fake_async: ^1.3.0
flutter_test:
sdk: flutter
json_serializable: ^6.3.1
mocktail: ^1.0.2
mocktail_image_network: ^1.0.0
dependency_overrides:
intl: ^0.19.0
flutter:
generate: true
uses-material-design: true
| news_toolkit/flutter_news_example/pubspec.yaml/0 | {
"file_path": "news_toolkit/flutter_news_example/pubspec.yaml",
"repo_id": "news_toolkit",
"token_count": 1151
} | 952 |
// ignore_for_file: prefer_const_constructors
import 'package:flutter_news_example/feed/feed.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:news_blocks/news_blocks.dart';
void main() {
  group('FeedState', () {
    test('initial has correct status', () {
      expect(
        FeedState.initial().status,
        equals(FeedStatus.initial),
      );
    });

    test('supports value comparisons', () {
      expect(
        FeedState.initial(),
        equals(FeedState.initial()),
      );
    });

    group('copyWith', () {
      // copyWith must be an identity when given no arguments, and must
      // update exactly the property that is passed.
      test(
          'returns same object '
          'when no properties are passed', () {
        expect(
          FeedState.initial().copyWith(),
          equals(FeedState.initial()),
        );
      });

      test(
          'returns object with updated status '
          'when status is passed', () {
        expect(
          FeedState.initial().copyWith(
            status: FeedStatus.loading,
          ),
          equals(
            FeedState(
              status: FeedStatus.loading,
            ),
          ),
        );
      });

      test(
          'returns object with updated feed '
          'when feed is passed', () {
        // The feed maps each category to its list of content blocks.
        final feed = {
          Category.health: [SectionHeaderBlock(title: 'Health')],
        };
        expect(
          FeedState(status: FeedStatus.populated).copyWith(feed: feed),
          equals(
            FeedState(
              status: FeedStatus.populated,
              feed: feed,
            ),
          ),
        );
      });

      test(
          'returns object with updated hasMoreNews '
          'when hasMoreNews is passed', () {
        // hasMoreNews tracks, per category, whether more pages can be loaded.
        final hasMoreNews = {
          Category.health: false,
        };
        expect(
          FeedState(status: FeedStatus.populated)
              .copyWith(hasMoreNews: hasMoreNews),
          equals(
            FeedState(
              status: FeedStatus.populated,
              hasMoreNews: hasMoreNews,
            ),
          ),
        );
      });
    });
  });
}
| news_toolkit/flutter_news_example/test/feed/bloc/feed_state_test.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/test/feed/bloc/feed_state_test.dart",
"repo_id": "news_toolkit",
"token_count": 1014
} | 953 |
// ignore_for_file: prefer_const_constructors
import 'dart:async';
import 'package:bloc_test/bloc_test.dart';
import 'package:flutter_news_example/login/login.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:mocktail/mocktail.dart';
import 'package:user_repository/user_repository.dart';
class MockUserRepository extends Mock implements UserRepository {}
class MockUser extends Mock implements User {}
void main() {
  group('LoginWithEmailLinkBloc', () {
    late UserRepository userRepository;
    late StreamController<Uri> incomingEmailLinksController;

    // Each test gets a fresh repository mock whose incomingEmailLinks is
    // backed by a controllable stream, so tests can push links on demand.
    setUp(() {
      userRepository = MockUserRepository();
      incomingEmailLinksController = StreamController<Uri>();
      when(() => userRepository.incomingEmailLinks)
          .thenAnswer((_) => incomingEmailLinksController.stream);
    });

    test('initial state is LoginWithEmailLinkState', () {
      expect(
        LoginWithEmailLinkBloc(
          userRepository: userRepository,
        ).state,
        LoginWithEmailLinkState(),
      );
    });

    // The bloc reacts to every link emitted on incomingEmailLinks and
    // attempts to complete the email-link sign-in flow.
    group('on incomingEmailLinks stream update', () {
      const email = '[email protected]';
      final user = MockUser();
      // A valid email link carries a `continueUrl` query parameter whose own
      // query contains the user's email address.
      final continueUrl =
          Uri.https('continue.link', '', <String, String>{'email': email});
      final validEmailLink = Uri.https(
        'email.link',
        '/email_login',
        <String, String>{'continueUrl': continueUrl.toString()},
      );
      // Invalid variants: no continueUrl at all, and a continueUrl that does
      // not carry an email query parameter.
      final emailLinkWithoutContinueUrl = Uri.https(
        'email.link',
        '/email_login',
      );
      final emailLinkWithInvalidContinueUrl = Uri.https(
        'email.link',
        '/email_login',
        <String, String>{'continueUrl': Uri.https('').toString()},
      );

      setUp(() {
        when(() => userRepository.user)
            .thenAnswer((invocation) => Stream.value(user));
        when(
          () => userRepository.logInWithEmailLink(
            email: any(named: 'email'),
            emailLink: any(named: 'emailLink'),
          ),
        ).thenAnswer((_) async {});
      });

      // Email-link login only applies to anonymous users; a signed-in
      // (non-anonymous) user must produce a failure.
      blocTest<LoginWithEmailLinkBloc, LoginWithEmailLinkState>(
        'emits [loading, failure] '
        'when the user is already logged in',
        setUp: () {
          when(() => user.isAnonymous).thenReturn(false);
        },
        build: () => LoginWithEmailLinkBloc(userRepository: userRepository),
        act: (bloc) => incomingEmailLinksController.add(validEmailLink),
        expect: () => const <LoginWithEmailLinkState>[
          LoginWithEmailLinkState(status: LoginWithEmailLinkStatus.loading),
          LoginWithEmailLinkState(status: LoginWithEmailLinkStatus.failure)
        ],
      );

      blocTest<LoginWithEmailLinkBloc, LoginWithEmailLinkState>(
        'emits [loading, failure] '
        'when the user is anonymous and '
        'continueUrl is missing in the email link',
        setUp: () {
          when(() => user.isAnonymous).thenReturn(true);
        },
        build: () => LoginWithEmailLinkBloc(userRepository: userRepository),
        act: (bloc) =>
            incomingEmailLinksController.add(emailLinkWithoutContinueUrl),
        expect: () => const <LoginWithEmailLinkState>[
          LoginWithEmailLinkState(status: LoginWithEmailLinkStatus.loading),
          LoginWithEmailLinkState(status: LoginWithEmailLinkStatus.failure)
        ],
      );

      blocTest<LoginWithEmailLinkBloc, LoginWithEmailLinkState>(
        'emits [loading, failure] '
        'when the user is anonymous and '
        'invalid continueUrl is provided in the email link',
        setUp: () {
          when(() => user.isAnonymous).thenReturn(true);
        },
        build: () => LoginWithEmailLinkBloc(userRepository: userRepository),
        act: (bloc) =>
            incomingEmailLinksController.add(emailLinkWithInvalidContinueUrl),
        expect: () => const <LoginWithEmailLinkState>[
          LoginWithEmailLinkState(status: LoginWithEmailLinkStatus.loading),
          LoginWithEmailLinkState(status: LoginWithEmailLinkStatus.failure)
        ],
      );

      // Even with a valid link, a repository error must surface as failure.
      blocTest<LoginWithEmailLinkBloc, LoginWithEmailLinkState>(
        'emits [loading, failure] '
        'when the user is anonymous and '
        'valid continueUrl is provided in the email link and '
        'logInWithEmailLink fails',
        setUp: () {
          when(() => user.isAnonymous).thenReturn(true);
          when(
            () => userRepository.logInWithEmailLink(
              email: any(named: 'email'),
              emailLink: any(named: 'emailLink'),
            ),
          ).thenThrow(Exception());
        },
        build: () => LoginWithEmailLinkBloc(userRepository: userRepository),
        act: (bloc) => incomingEmailLinksController.add(validEmailLink),
        expect: () => const <LoginWithEmailLinkState>[
          LoginWithEmailLinkState(status: LoginWithEmailLinkStatus.loading),
          LoginWithEmailLinkState(status: LoginWithEmailLinkStatus.failure)
        ],
      );

      blocTest<LoginWithEmailLinkBloc, LoginWithEmailLinkState>(
        'emits [loading, success] '
        'when the user is anonymous and '
        'valid continueUrl is provided in the email link and '
        'logInWithEmailLink succeeds',
        setUp: () {
          when(() => user.isAnonymous).thenReturn(true);
        },
        build: () => LoginWithEmailLinkBloc(userRepository: userRepository),
        act: (bloc) => incomingEmailLinksController.add(validEmailLink),
        expect: () => const <LoginWithEmailLinkState>[
          LoginWithEmailLinkState(status: LoginWithEmailLinkStatus.loading),
          LoginWithEmailLinkState(status: LoginWithEmailLinkStatus.success)
        ],
      );

      // Verifies the exact email/emailLink pair forwarded to the repository.
      blocTest<LoginWithEmailLinkBloc, LoginWithEmailLinkState>(
        'calls logInWithEmailLink',
        setUp: () {
          when(() => user.isAnonymous).thenReturn(true);
        },
        build: () => LoginWithEmailLinkBloc(userRepository: userRepository),
        act: (bloc) => incomingEmailLinksController.add(validEmailLink),
        verify: (_) {
          verify(
            () => userRepository.logInWithEmailLink(
              email: email,
              emailLink: validEmailLink.toString(),
            ),
          ).called(1);
        },
      );
    });

    group('close', () {
      blocTest<LoginWithEmailLinkBloc, LoginWithEmailLinkState>(
        'cancels UserRepository.incomingEmailLinks subscription',
        build: () => LoginWithEmailLinkBloc(
          userRepository: userRepository,
        ),
        // blocTest closes the bloc after running; by tearDown time the
        // bloc's subscription to the stream must have been cancelled.
        tearDown: () {
          expect(incomingEmailLinksController.hasListener, isFalse);
        },
      );
    });
  });
}
| news_toolkit/flutter_news_example/test/login/bloc/login_with_email_link_bloc_test.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/test/login/bloc/login_with_email_link_bloc_test.dart",
"repo_id": "news_toolkit",
"token_count": 2731
} | 954 |
// ignore_for_file: prefer_const_constructors
// ignore_for_file: prefer_const_literals_to_create_immutables
import 'package:bloc_test/bloc_test.dart';
import 'package:flutter_news_example/newsletter/newsletter.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:form_inputs/form_inputs.dart';
import 'package:mocktail/mocktail.dart';
import 'package:news_repository/news_repository.dart';
class MockNewsRepository extends Mock implements NewsRepository {}
void main() {
  late NewsRepository newsRepository;
  const emailValid = Email.dirty('test');

  setUpAll(() {
    newsRepository = MockNewsRepository();
  });

  group('NewsletterBloc', () {
    group('on NewsletterSubscribed', () {
      blocTest<NewsletterBloc, NewsletterState>(
        'emits [loading, success] '
        'when subscribeToNewsletter succeeds',
        setUp: () => when(
          () => newsRepository.subscribeToNewsletter(
            email: any(named: 'email'),
          ),
        ).thenAnswer((_) async {}),
        seed: () => NewsletterState(email: Email.dirty('test'), isValid: true),
        build: () => NewsletterBloc(newsRepository: newsRepository),
        act: (bloc) => bloc.add(NewsletterSubscribed()),
        expect: () => <NewsletterState>[
          NewsletterState(
            status: NewsletterStatus.loading,
            email: emailValid,
            isValid: true,
          ),
          NewsletterState(
            status: NewsletterStatus.success,
            email: emailValid,
            isValid: true,
          ),
        ],
      );

      blocTest<NewsletterBloc, NewsletterState>(
        'emits [loading, failed] '
        'when subscribeToNewsletter throws',
        // Fix: this previously used `.thenThrow(Error.new)`, which throws the
        // constructor *tear-off* (a Function object) rather than an error
        // instance. Throw a real Exception so the failure path is exercised
        // with a realistic error value.
        setUp: () => when(
          () => newsRepository.subscribeToNewsletter(
            email: any(named: 'email'),
          ),
        ).thenThrow(Exception('subscription failed')),
        seed: () => NewsletterState(email: Email.dirty('test')),
        build: () => NewsletterBloc(newsRepository: newsRepository),
        act: (bloc) => bloc.add(NewsletterSubscribed()),
        expect: () => <NewsletterState>[
          NewsletterState(
            status: NewsletterStatus.loading,
            email: emailValid,
          ),
          NewsletterState(
            status: NewsletterStatus.failure,
            email: emailValid,
          ),
        ],
      );

      // Subscription with an empty email address must be a no-op.
      blocTest<NewsletterBloc, NewsletterState>(
        'emits nothing '
        'when email is empty',
        seed: () => NewsletterState(email: Email.dirty()),
        build: () => NewsletterBloc(newsRepository: newsRepository),
        act: (bloc) => bloc.add(NewsletterSubscribed()),
        expect: () => <NewsletterState>[],
      );
    });

    group('on EmailChanged', () {
      final initialState = NewsletterState(email: Email.dirty('test'));
      const newEmail = '[email protected]';

      // Changing the email re-validates the form and stores the new value.
      blocTest<NewsletterBloc, NewsletterState>(
        'emits changed state '
        'when emailChanged',
        seed: () => initialState,
        build: () => NewsletterBloc(newsRepository: newsRepository),
        act: (bloc) => bloc.add(EmailChanged(email: newEmail)),
        expect: () => <NewsletterState>[
          initialState.copyWith(email: Email.dirty(newEmail), isValid: true)
        ],
      );
    });
  });
}
| news_toolkit/flutter_news_example/test/newsletter/bloc/newsletter_bloc_test.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/test/newsletter/bloc/newsletter_bloc_test.dart",
"repo_id": "news_toolkit",
"token_count": 1362
} | 955 |
// ignore_for_file: prefer_const_constructors
import 'package:flutter_news_example/search/search.dart';
import 'package:flutter_news_example_api/client.dart';
import 'package:flutter_test/flutter_test.dart';
void main() {
  group('SearchState', () {
    test('has correct initial status', () {
      expect(
        const SearchState.initial().status,
        equals(SearchStatus.initial),
      );
    });

    group('copyWith', () {
      // copyWith must be an identity when given no arguments, and must
      // update exactly the property that is passed.
      test(
          'returns same object '
          'when no properties are passed', () {
        expect(
          SearchState.initial().copyWith(),
          equals(SearchState.initial()),
        );
      });

      test(
          'returns object with updated status '
          'when status is passed', () {
        expect(
          SearchState.initial().copyWith(
            status: SearchStatus.loading,
          ),
          equals(
            SearchState(
              status: SearchStatus.loading,
              articles: const [],
              topics: const [],
              searchType: SearchType.popular,
            ),
          ),
        );
      });

      test(
          'returns object with updated articles '
          'when articles are passed', () {
        final articles = [DividerHorizontalBlock()];
        expect(
          SearchState.initial().copyWith(articles: articles),
          equals(
            SearchState(
              articles: articles,
              status: SearchStatus.initial,
              topics: const [],
              searchType: SearchType.popular,
            ),
          ),
        );
      });

      test(
          'returns object with updated topics '
          'when topics are passed', () {
        final topics = ['Topic'];
        expect(
          SearchState.initial().copyWith(topics: topics),
          equals(
            SearchState(
              topics: topics,
              status: SearchStatus.initial,
              articles: const [],
              searchType: SearchType.popular,
            ),
          ),
        );
      });

      test(
          'returns object with updated searchType '
          'when searchType is passed', () {
        expect(
          SearchState.initial().copyWith(
            searchType: SearchType.relevant,
          ),
          equals(
            SearchState(
              topics: const [],
              status: SearchStatus.initial,
              articles: const [],
              searchType: SearchType.relevant,
            ),
          ),
        );
      });
    });
  });
}
| news_toolkit/flutter_news_example/test/search/bloc/search_state_test.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/test/search/bloc/search_state_test.dart",
"repo_id": "news_toolkit",
"token_count": 1260
} | 956 |
// ignore_for_file: prefer_const_constructors
import 'package:app_ui/app_ui.dart';
import 'package:flutter/material.dart';
import 'package:flutter_news_example/terms_of_service/terms_of_service.dart';
import 'package:flutter_test/flutter_test.dart';
import '../../helpers/helpers.dart';
void main() {
  const tapMeText = 'Tap Me';

  group('TermsOfServicePage', () {
    group('route', () {
      test('has a route', () {
        expect(TermsOfServicePage.route(), isA<MaterialPageRoute<void>>());
      });

      // Pushing the route from a live navigator must land on the page.
      testWidgets('router returns a valid navigation route', (tester) async {
        await tester.pumpApp(
          Scaffold(
            body: Builder(
              builder: (context) {
                return ElevatedButton(
                  onPressed: () {
                    Navigator.of(context)
                        .push<void>(TermsOfServicePage.route());
                  },
                  child: const Text(tapMeText),
                );
              },
            ),
          ),
        );
        await tester.tap(find.text(tapMeText));
        await tester.pumpAndSettle();
        expect(find.byType(TermsOfServicePage), findsOneWidget);
      });
    });

    group('renders', () {
      testWidgets('terms of service page header', (tester) async {
        await tester.pumpApp(TermsOfServicePage());
        expect(find.byType(TermsOfServiceHeader), findsOneWidget);
      });

      testWidgets('terms of service body', (tester) async {
        await tester.pumpApp(TermsOfServicePage());
        expect(find.byType(TermsOfServiceBody), findsOneWidget);
      });
    });

    group('navigates', () {
      // Tapping the app-level back button must pop the page off the stack.
      testWidgets('back when tapped on back icon', (tester) async {
        await tester.pumpApp(TermsOfServicePage());
        await tester.tap(find.byType(AppBackButton));
        await tester.pumpAndSettle();
        expect(find.byType(TermsOfServicePage), findsNothing);
      });
    });
  });
}
| news_toolkit/flutter_news_example/test/terms_of_service/view/terms_of_service_page_test.dart/0 | {
"file_path": "news_toolkit/flutter_news_example/test/terms_of_service/view/terms_of_service_page_test.dart",
"repo_id": "news_toolkit",
"token_count": 889
} | 957 |
// Replace with google-services.json from the Firebase Console // | news_toolkit/tool/generator/static/google-services.json/0 | {
"file_path": "news_toolkit/tool/generator/static/google-services.json",
"repo_id": "news_toolkit",
"token_count": 14
} | 958 |
#!/bin/bash
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Prepares the repository tooling for CI: exits on the first error, fetches
# origin/main so FETCH_HEAD is populated, then installs the Dart dependencies
# of the repo tool in script/tool.
set -e

# To set FETCH_HEAD for "git merge-base" to work
git fetch origin main

cd script/tool
dart pub get
| packages/.ci/scripts/prepare_tool.sh/0 | {
"file_path": "packages/.ci/scripts/prepare_tool.sh",
"repo_id": "packages",
"token_count": 86
} | 959 |
tasks:
- name: prepare tool
script: .ci/scripts/prepare_tool.sh
infra_step: true # Note infra steps failing prevents "always" from running.
- name: create simulator
script: .ci/scripts/create_simulator.sh
infra_step: true # Note infra steps failing prevents "always" from running.
- name: download Dart and iOS deps
script: .ci/scripts/tool_runner.sh
args: ["fetch-deps", "--ios", "--supporting-target-platforms-only"]
infra_step: true
- name: build examples
script: .ci/scripts/tool_runner.sh
args: ["build-examples", "--ios"]
- name: xcode analyze
script: .ci/scripts/tool_runner.sh
args: ["xcode-analyze", "--ios"]
- name: xcode analyze deprecation
# Ensure we don't accidentally introduce deprecated code.
script: .ci/scripts/tool_runner.sh
args: ["xcode-analyze", "--ios", "--ios-min-version=13.0"]
- name: native test
script: .ci/scripts/tool_runner.sh
# Simulator name and version must match name and version in create_simulator.sh
args: ["native-test", "--ios", "--ios-destination", "platform=iOS Simulator,name=Flutter-iPhone,OS=17.0"]
- name: boot simulator
# Ensure simulator is still booted
script: .ci/scripts/boot_simulator.sh
infra_step: true # Note infra steps failing prevents "always" from running.
- name: drive examples
# `drive-examples` contains integration tests, which change the UI of the application.
# This UI change sometimes affects `xctest`,
# so we run `drive-examples` after `native-test`; changing the order will result in a CI failure.
script: .ci/scripts/tool_runner.sh
args: ["drive-examples", "--ios", "--exclude=script/configs/exclude_integration_ios.yaml"]
- name: remove simulator
script: .ci/scripts/remove_simulator.sh
always: true
infra_step: true
| packages/.ci/targets/ios_platform_tests.yaml/0 | {
"file_path": "packages/.ci/targets/ios_platform_tests.yaml",
"repo_id": "packages",
"token_count": 625
} | 960 |
tasks:
- name: prepare tool
script: .ci/scripts/prepare_tool.sh
infra_step: true # Note infra steps failing prevents "always" from running.
- name: dart unit tests
script: .ci/scripts/dart_unit_tests_win32.sh
| packages/.ci/targets/windows_dart_unit_tests.yaml/0 | {
"file_path": "packages/.ci/targets/windows_dart_unit_tests.yaml",
"repo_id": "packages",
"token_count": 81
} | 961 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:flutter/material.dart';
/// Used by [PageTransitionsTheme] to define a page route transition animation
/// in which the outgoing page fades out, then the incoming page fades in and
/// scale up.
///
/// This pattern is recommended for a transition between UI elements that do not
/// have a strong relationship to one another.
///
/// Scale is only applied to incoming elements to emphasize new content over
/// old.
///
/// The following example shows how the FadeThroughPageTransitionsBuilder can
/// be used in a [PageTransitionsTheme] to change the default transitions
/// of [MaterialPageRoute]s.
///
/// ```dart
/// MaterialApp(
/// theme: ThemeData(
/// pageTransitionsTheme: PageTransitionsTheme(
/// builders: {
/// TargetPlatform.android: FadeThroughPageTransitionsBuilder(),
/// TargetPlatform.iOS: FadeThroughPageTransitionsBuilder(),
/// },
/// ),
/// ),
/// routes: {
/// '/': (BuildContext context) {
/// return Container(
/// color: Colors.red,
/// child: Center(
/// child: TextButton(
/// child: Text('Push route'),
/// onPressed: () {
/// Navigator.of(context).pushNamed('/a');
/// },
/// ),
/// ),
/// );
/// },
/// '/a' : (BuildContext context) {
/// return Container(
/// color: Colors.blue,
/// child: Center(
/// child: TextButton(
/// child: Text('Pop route'),
/// onPressed: () {
/// Navigator.of(context).pop();
/// },
/// ),
/// ),
/// );
/// },
/// },
/// );
/// ```
class FadeThroughPageTransitionsBuilder extends PageTransitionsBuilder {
  /// Creates a [FadeThroughPageTransitionsBuilder].
  const FadeThroughPageTransitionsBuilder({this.fillColor});

  /// The color to use for the background color during the transition.
  ///
  /// This defaults to the [Theme]'s [ThemeData.canvasColor].
  final Color? fillColor;

  @override
  Widget buildTransitions<T>(
    PageRoute<T>? route,
    BuildContext? context,
    Animation<double> animation,
    Animation<double> secondaryAnimation,
    Widget child,
  ) {
    // Delegates entirely to [FadeThroughTransition]; the route and context
    // parameters are not needed to build this transition.
    return FadeThroughTransition(
      animation: animation,
      secondaryAnimation: secondaryAnimation,
      fillColor: fillColor,
      child: child,
    );
  }
}
/// Defines a transition in which outgoing elements fade out, then incoming
/// elements fade in and scale up.
///
/// The fade through pattern provides a transition animation between UI elements
/// that do not have a strong relationship to one another. As an example, the
/// [BottomNavigationBar] may use this animation to transition the currently
/// displayed content when a new [BottomNavigationBarItem] is selected.
///
/// Scale is only applied to incoming elements to emphasize new content over
/// old.
///
/// Consider using [FadeThroughPageTransitionsBuilder] within a
/// [PageTransitionsTheme] if you want to apply this kind of transition to
/// [MaterialPageRoute] transitions within a Navigator (see
/// [FadeThroughPageTransitionsBuilder] for some example code). Or use this transition
/// directly in a [PageTransitionSwitcher.transitionBuilder] to transition
/// from one widget to another as seen in the following example:
///
/// ```dart
/// int _selectedIndex = 0;
///
/// final List<Color> _colors = [Colors.blue, Colors.red, Colors.yellow];
///
/// @override
/// Widget build(BuildContext context) {
/// return Scaffold(
/// appBar: AppBar(
/// title: const Text('Switcher Sample'),
/// ),
/// body: PageTransitionSwitcher(
/// transitionBuilder: (
/// Widget child,
/// Animation<double> primaryAnimation,
/// Animation<double> secondaryAnimation,
/// ) {
/// return FadeThroughTransition(
/// child: child,
/// animation: primaryAnimation,
/// secondaryAnimation: secondaryAnimation,
/// );
/// },
/// child: Container(
/// key: ValueKey<int>(_selectedIndex),
/// color: _colors[_selectedIndex],
/// ),
/// ),
/// bottomNavigationBar: BottomNavigationBar(
/// items: const <BottomNavigationBarItem>[
/// BottomNavigationBarItem(
/// icon: Icon(Icons.home),
/// title: Text('Blue'),
/// ),
/// BottomNavigationBarItem(
/// icon: Icon(Icons.business),
/// title: Text('Red'),
/// ),
/// BottomNavigationBarItem(
/// icon: Icon(Icons.school),
/// title: Text('Yellow'),
/// ),
/// ],
/// currentIndex: _selectedIndex,
/// selectedItemColor: Colors.amber[800],
/// onTap: (int index) {
/// setState(() {
/// _selectedIndex = index;
/// });
/// },
/// ),
/// );
/// }
/// ```
/// Defines a transition in which outgoing elements fade out, then incoming
/// elements fade in and scale up.
///
/// Scale is only applied to incoming elements to emphasize new content over
/// old. Consider [FadeThroughPageTransitionsBuilder] to apply this kind of
/// transition to [MaterialPageRoute] transitions within a [Navigator], or
/// pass it to a [PageTransitionSwitcher.transitionBuilder] directly.
class FadeThroughTransition extends StatelessWidget {
  /// Creates a [FadeThroughTransition].
  ///
  /// The [animation] and [secondaryAnimation] arguments are required and
  /// must not be null.
  const FadeThroughTransition({
    super.key,
    required this.animation,
    required this.secondaryAnimation,
    this.fillColor,
    this.child,
  });

  /// The animation that drives the [child]'s entrance and exit.
  ///
  /// See also:
  ///
  ///  * [TransitionRoute.animate], which is the value given to this property
  ///    when the [FadeThroughTransition] is used as a page transition.
  final Animation<double> animation;

  /// The animation that transitions [child] when new content is pushed on
  /// top of it.
  ///
  /// See also:
  ///
  ///  * [TransitionRoute.secondaryAnimation], which is the value given to
  ///    this property when the [FadeThroughTransition] is used as a page
  ///    transition.
  final Animation<double> secondaryAnimation;

  /// The color to use for the background color during the transition.
  ///
  /// This defaults to the [Theme]'s [ThemeData.canvasColor].
  final Color? fillColor;

  /// The widget below this widget in the tree.
  ///
  /// This widget will transition in and out as driven by [animation] and
  /// [secondaryAnimation].
  final Widget? child;

  @override
  Widget build(BuildContext context) {
    final background = fillColor ?? Theme.of(context).canvasColor;
    // The inner transition, driven by the reversed [secondaryAnimation],
    // fades this child out when new content is pushed on top; the outer
    // transition, driven by [animation], handles this child's own entrance
    // and exit.
    final exitLayer = _ZoomedFadeInFadeOut(
      animation: ReverseAnimation(secondaryAnimation),
      child: child,
    );
    return _ZoomedFadeInFadeOut(
      animation: animation,
      child: ColoredBox(
        color: background,
        child: exitLayer,
      ),
    );
  }
}
class _ZoomedFadeInFadeOut extends StatelessWidget {
  const _ZoomedFadeInFadeOut({required this.animation, this.child});
  /// Drives both the forward (zoomed fade-in) and reverse (fade-out) legs.
  final Animation<double> animation;
  /// The content being transitioned.
  final Widget? child;
  @override
  Widget build(BuildContext context) {
    // Named local builders instead of inline closures; behavior is identical.
    Widget buildForward(
      BuildContext context,
      Animation<double> forwardAnimation,
      Widget? forwardChild,
    ) {
      return _ZoomedFadeIn(
        animation: forwardAnimation,
        child: forwardChild,
      );
    }
    Widget buildReverse(
      BuildContext context,
      Animation<double> reverseAnimation,
      Widget? reverseChild,
    ) {
      return _FadeOut(
        animation: reverseAnimation,
        child: reverseChild,
      );
    }
    return DualTransitionBuilder(
      animation: animation,
      forwardBuilder: buildForward,
      reverseBuilder: buildReverse,
      child: child,
    );
  }
}
class _ZoomedFadeIn extends StatelessWidget {
  const _ZoomedFadeIn({
    this.child,
    required this.animation,
  });
  final Widget? child;
  final Animation<double> animation;
  // Deceleration curve applied to the active segment of both sequences.
  static final CurveTween _easeInCurve = CurveTween(
    curve: const Cubic(0.0, 0.0, 0.2, 1.0),
  );
  // Holds at 92% scale for the first 30% of the animation, then eases up
  // to full size over the remaining 70%.
  static final TweenSequence<double> _scaleSequence = TweenSequence<double>(
    <TweenSequenceItem<double>>[
      TweenSequenceItem<double>(
        tween: ConstantTween<double>(0.92),
        weight: 6 / 20,
      ),
      TweenSequenceItem<double>(
        tween: Tween<double>(begin: 0.92, end: 1.0).chain(_easeInCurve),
        weight: 14 / 20,
      ),
    ],
  );
  // Stays fully transparent for the first 30%, then fades in.
  static final TweenSequence<double> _opacitySequence = TweenSequence<double>(
    <TweenSequenceItem<double>>[
      TweenSequenceItem<double>(
        tween: ConstantTween<double>(0.0),
        weight: 6 / 20,
      ),
      TweenSequenceItem<double>(
        tween: Tween<double>(begin: 0.0, end: 1.0).chain(_easeInCurve),
        weight: 14 / 20,
      ),
    ],
  );
  @override
  Widget build(BuildContext context) {
    final Animation<double> opacity = _opacitySequence.animate(animation);
    final Animation<double> scale = _scaleSequence.animate(animation);
    return FadeTransition(
      opacity: opacity,
      child: ScaleTransition(
        scale: scale,
        child: child,
      ),
    );
  }
}
class _FadeOut extends StatelessWidget {
  const _FadeOut({
    this.child,
    required this.animation,
  });
  final Widget? child;
  final Animation<double> animation;
  // Acceleration curve used while the outgoing content disappears.
  static final CurveTween _easeOutCurve = CurveTween(
    curve: const Cubic(0.4, 0.0, 1.0, 1.0),
  );
  // Fades out over the first 30% of the animation, then remains invisible
  // for the remaining 70%.
  static final TweenSequence<double> _opacitySequence = TweenSequence<double>(
    <TweenSequenceItem<double>>[
      TweenSequenceItem<double>(
        tween: Tween<double>(begin: 1.0, end: 0.0).chain(_easeOutCurve),
        weight: 6 / 20,
      ),
      TweenSequenceItem<double>(
        tween: ConstantTween<double>(0.0),
        weight: 14 / 20,
      ),
    ],
  );
  @override
  Widget build(BuildContext context) {
    final Animation<double> opacity = _opacitySequence.animate(animation);
    return FadeTransition(
      opacity: opacity,
      child: child,
    );
  }
}
| packages/packages/animations/lib/src/fade_through_transition.dart/0 | {
"file_path": "packages/packages/animations/lib/src/fade_through_transition.dart",
"repo_id": "packages",
"token_count": 3683
} | 962 |
# Camera Plugin
<?code-excerpt path-base="example/lib"?>
[](https://pub.dev/packages/camera)
A Flutter plugin for iOS, Android and Web allowing access to the device cameras.
| | Android | iOS | Web |
|----------------|---------|-----------|------------------------|
| **Support** | SDK 21+ | iOS 12.0+ | [See `camera_web `][1] |
## Features
* Display live camera preview in a widget.
* Snapshots can be captured and saved to a file.
* Record video.
* Add access to the image stream from Dart.
## Installation
First, add `camera` as a [dependency in your pubspec.yaml file](https://flutter.dev/using-packages/).
### iOS
Add two rows to the `ios/Runner/Info.plist`:
* one with the key `Privacy - Camera Usage Description` and a usage description.
* and one with the key `Privacy - Microphone Usage Description` and a usage description.
If editing `Info.plist` as text, add:
```xml
<key>NSCameraUsageDescription</key>
<string>your usage description here</string>
<key>NSMicrophoneUsageDescription</key>
<string>your usage description here</string>
```
### Android
Change the minimum Android sdk version to 21 (or higher) in your `android/app/build.gradle` file.
```groovy
minSdkVersion 21
```
It's important to note that the `MediaRecorder` class is not working properly on emulators, as stated in the documentation: https://developer.android.com/reference/android/media/MediaRecorder. Specifically, when recording a video with sound enabled and trying to play it back, the duration won't be correct and you will only see the first frame.
### Web integration
For web integration details, see the
[`camera_web` package](https://pub.dev/packages/camera_web).
### Handling Lifecycle states
As of version [0.5.0](https://github.com/flutter/packages/blob/main/packages/camera/CHANGELOG.md#050) of the camera plugin, lifecycle changes are no longer handled by the plugin. This means developers are now responsible for controlling camera resources when the lifecycle state changes. Failing to do so might lead to unexpected behavior (for example, as described in issue [#39109](https://github.com/flutter/flutter/issues/39109)). Lifecycle changes can be handled by overriding the `didChangeAppLifecycleState` method like so:
<?code-excerpt "main.dart (AppLifecycle)"?>
```dart
@override
void didChangeAppLifecycleState(AppLifecycleState state) {
final CameraController? cameraController = controller;
// App state changed before we got the chance to initialize.
if (cameraController == null || !cameraController.value.isInitialized) {
return;
}
if (state == AppLifecycleState.inactive) {
cameraController.dispose();
} else if (state == AppLifecycleState.resumed) {
_initializeCameraController(cameraController.description);
}
}
```
### Handling camera access permissions
Permission errors may be thrown when initializing the camera controller, and you are expected to handle them properly.
Here is a list of all permission error codes that can be thrown:
- `CameraAccessDenied`: Thrown when user denies the camera access permission.
- `CameraAccessDeniedWithoutPrompt`: iOS only for now. Thrown when the user has previously denied the permission. iOS does not allow showing the permission dialog a second time, so users will have to go to Settings > Privacy > Camera in order to enable camera access.
- `CameraAccessRestricted`: iOS only for now. Thrown when camera access is restricted and users cannot grant permission (parental control).
- `AudioAccessDenied`: Thrown when user denies the audio access permission.
- `AudioAccessDeniedWithoutPrompt`: iOS only for now. Thrown when the user has previously denied the permission. iOS does not allow showing the permission dialog a second time, so users will have to go to Settings > Privacy > Microphone in order to enable audio access.
- `AudioAccessRestricted`: iOS only for now. Thrown when audio access is restricted and users cannot grant permission (parental control).
### Example
Here is a small example flutter app displaying a full screen camera preview.
<?code-excerpt "readme_full_example.dart (FullAppExample)"?>
```dart
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
late List<CameraDescription> _cameras;
Future<void> main() async {
WidgetsFlutterBinding.ensureInitialized();
_cameras = await availableCameras();
runApp(const CameraApp());
}
/// CameraApp is the Main Application.
class CameraApp extends StatefulWidget {
/// Default Constructor
const CameraApp({super.key});
@override
State<CameraApp> createState() => _CameraAppState();
}
class _CameraAppState extends State<CameraApp> {
late CameraController controller;
@override
void initState() {
super.initState();
controller = CameraController(_cameras[0], ResolutionPreset.max);
controller.initialize().then((_) {
if (!mounted) {
return;
}
setState(() {});
}).catchError((Object e) {
if (e is CameraException) {
switch (e.code) {
case 'CameraAccessDenied':
// Handle access errors here.
break;
default:
// Handle other errors here.
break;
}
}
});
}
@override
void dispose() {
controller.dispose();
super.dispose();
}
@override
Widget build(BuildContext context) {
if (!controller.value.isInitialized) {
return Container();
}
return MaterialApp(
home: CameraPreview(controller),
);
}
}
```
For a more elaborate usage example see [here](https://github.com/flutter/packages/tree/main/packages/camera/camera/example).
[1]: https://pub.dev/packages/camera_web#limitations-on-the-web-platform
| packages/packages/camera/camera/README.md/0 | {
"file_path": "packages/packages/camera/camera/README.md",
"repo_id": "packages",
"token_count": 1757
} | 963 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
export 'package:camera_platform_interface/camera_platform_interface.dart'
show
CameraDescription,
CameraException,
CameraLensDirection,
ExposureMode,
FlashMode,
FocusMode,
ImageFormatGroup,
ResolutionPreset,
XFile;
export 'src/camera_controller.dart';
export 'src/camera_image.dart';
export 'src/camera_preview.dart';
| packages/packages/camera/camera/lib/camera.dart/0 | {
"file_path": "packages/packages/camera/camera/lib/camera.dart",
"repo_id": "packages",
"token_count": 208
} | 964 |
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="io.flutter.plugins.camera">
    <!-- Required to access the device cameras for preview, photo, and video capture. -->
    <uses-permission android:name="android.permission.CAMERA"/>
    <!-- Required to record audio alongside video when audio capture is enabled. -->
    <uses-permission android:name="android.permission.RECORD_AUDIO"/>
</manifest>
| packages/packages/camera/camera_android/android/src/main/AndroidManifest.xml/0 | {
"file_path": "packages/packages/camera/camera_android/android/src/main/AndroidManifest.xml",
"repo_id": "packages",
"token_count": 90
} | 965 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugins.camera;
import static android.os.SystemClock.uptimeMillis;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.opengl.Matrix;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.view.Surface;
import androidx.annotation.NonNull;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* Renders video onto texture after performing a matrix rotation on each frame.
*
* <p>VideoRenderer is needed because when switching between cameras mid recording, the orientation
* of the recording from the new camera usually becomes flipped. MediaRecorder has
* setOrientationHint, but that cannot be called mid recording and therefore isn't useful. Android
* Camera2 has no setDisplayOrientation on the camera itself as it is supposed to 'just work' (see
* https://stackoverflow.com/questions/33479004/what-is-the-camera2-api-equivalent-of-setdisplayorientation).
* Therefore it cannot be used to set the camera's orientation either.
*
* <p>This leaves the solution to be routing the recording through a surface texture and performing
* a matrix transformation on it manually to get the correct orientation. This only happens when
* setDescription is called mid video recording.
*/
public class VideoRenderer {
  static String TAG = "VideoRenderer";
  // Vertex shader: applies the texture matrix to the UV coordinates and the
  // model-view-projection (rotation) matrix to the vertex positions.
  private static final String vertexShaderCode =
      " precision highp float;\n"
          + " attribute vec3 vertexPosition;\n"
          + " attribute vec2 uvs;\n"
          + " varying vec2 varUvs;\n"
          + " uniform mat4 texMatrix;\n"
          + " uniform mat4 mvp;\n"
          + "\n"
          + " void main()\n"
          + " {\n"
          + " varUvs = (texMatrix * vec4(uvs.x, uvs.y, 0, 1.0)).xy;\n"
          + " gl_Position = mvp * vec4(vertexPosition, 1.0);\n"
          + " }";
  // Fragment shader: samples the external (camera) texture at the transformed
  // UV coordinates. Requires the GL_OES_EGL_image_external extension.
  private static final String fragmentShaderCode =
      " #extension GL_OES_EGL_image_external : require\n"
          + " precision mediump float;\n"
          + "\n"
          + " varying vec2 varUvs;\n"
          + " uniform samplerExternalOES texSampler;\n"
          + "\n"
          + " void main()\n"
          + " {\n"
          + " vec4 c = texture2D(texSampler, varUvs);\n"
          + " gl_FragColor = vec4(c.r, c.g, c.b, c.a);\n"
          + " }";
  // Handle of the external texture that receives incoming camera frames.
  private final int[] textureHandles = new int[1];
  // Full-screen quad: x, y, z position followed by u, v per vertex (stride 5).
  private final float[] vertices =
      new float[] {
        -1.0f, -1.0f, 0.0f, 0f, 0f, -1.0f, 1.0f, 0.0f, 0f, 1f, 1.0f, 1.0f, 0.0f, 1f, 1f, 1.0f,
        -1.0f, 0.0f, 1f, 0f
      };
  // Two triangles covering the quad.
  private final int[] indices = new int[] {2, 1, 0, 0, 3, 2};
  // GL program plus attribute/uniform handles, all resolved in configureOpenGL().
  private int program;
  private int vertexHandle = 0;
  private final int[] bufferHandles = new int[2];
  private int uvsHandle = 0;
  private int texMatrixHandle = 0;
  private int mvpHandle = 0;
  // EGL objects backing the rendering context targeting outputSurface.
  EGLDisplay display;
  EGLContext context;
  EGLSurface surface;
  // Render thread that pulls frames from the input surface texture and draws.
  private Thread thread;
  private final Surface outputSurface;
  SurfaceTexture inputSurfaceTexture;
  private Surface inputSurface;
  private HandlerThread surfaceTextureFrameAvailableHandler;
  // Guards surfaceTextureFrameAvailable between the frame-available callback
  // and the render loop.
  final Object surfaceTextureAvailableFrameLock = new Object();
  Boolean surfaceTextureFrameAvailable = false;
  final int recordingWidth;
  final int recordingHeight;
  // Rotation (in degrees) applied to each frame via moveMatrix().
  private int rotation = 0;
  // Guards inputSurface creation; getInputSurface() blocks on this lock until
  // configureOpenGL() has created the surface and called notifyAll().
  private final Object lock = new Object();
  private final Thread.UncaughtExceptionHandler uncaughtExceptionHandler;
  /** Gets surface for input. Blocks until surface is ready. */
  @NonNull
  public Surface getInputSurface() throws InterruptedException {
    synchronized (lock) {
      while (inputSurface == null) {
        lock.wait();
      }
    }
    return inputSurface;
  }
  public VideoRenderer(
      @NonNull Surface outputSurface,
      int recordingWidth,
      int recordingHeight,
      @NonNull Thread.UncaughtExceptionHandler uncaughtExceptionHandler) {
    this.outputSurface = outputSurface;
    this.recordingHeight = recordingHeight;
    this.recordingWidth = recordingWidth;
    this.uncaughtExceptionHandler = uncaughtExceptionHandler;
    // Spawns the render thread; GL/EGL setup happens on that thread.
    startOpenGL();
    Log.d(TAG, "VideoRenderer setup complete");
  }
  /** Stop rendering and cleanup resources. */
  public void close() {
    thread.interrupt();
    surfaceTextureFrameAvailableHandler.quitSafely();
    cleanupOpenGL();
    inputSurfaceTexture.release();
  }
  private void cleanupOpenGL() {
    GLES20.glDeleteBuffers(2, bufferHandles, 0);
    GLES20.glDeleteTextures(1, textureHandles, 0);
    EGL14.eglDestroyContext(display, context);
    EGL14.eglDestroySurface(display, surface);
    GLES20.glDeleteProgram(program);
  }
  /** Configures openGL. Must be called in same thread as draw is called. */
  void configureOpenGL() {
    synchronized (lock) {
      // Obtain and initialize the default EGL display.
      display = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
      if (display == EGL14.EGL_NO_DISPLAY)
        throw new RuntimeException(
            "eglDisplay == EGL14.EGL_NO_DISPLAY: "
                + GLUtils.getEGLErrorString(EGL14.eglGetError()));
      int[] version = new int[2];
      if (!EGL14.eglInitialize(display, version, 0, version, 1))
        throw new RuntimeException(
            "eglInitialize(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()));
      // eglPresentationTimeANDROID (used in draw()) requires this extension.
      String eglExtensions = EGL14.eglQueryString(display, EGL14.EGL_EXTENSIONS);
      if (!eglExtensions.contains("EGL_ANDROID_presentation_time"))
        throw new RuntimeException(
            "cannot configure OpenGL. missing EGL_ANDROID_presentation_time");
      // Request EGL_RECORDABLE_ANDROID where supported so the surface is
      // usable as a MediaRecorder/MediaCodec input.
      int[] attribList;
      if (SdkCapabilityChecker.supportsEglRecordableAndroid()) {
        attribList =
            new int[] {
              EGL14.EGL_RED_SIZE, 8,
              EGL14.EGL_GREEN_SIZE, 8,
              EGL14.EGL_BLUE_SIZE, 8,
              EGL14.EGL_ALPHA_SIZE, 8,
              EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
              EGLExt.EGL_RECORDABLE_ANDROID, 1,
              EGL14.EGL_NONE
            };
      } else {
        attribList =
            new int[] {
              EGL14.EGL_RED_SIZE, 8,
              EGL14.EGL_GREEN_SIZE, 8,
              EGL14.EGL_BLUE_SIZE, 8,
              EGL14.EGL_ALPHA_SIZE, 8,
              EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
              EGL14.EGL_NONE
            };
      }
      EGLConfig[] configs = new EGLConfig[1];
      int[] numConfigs = new int[1];
      if (!EGL14.eglChooseConfig(display, attribList, 0, configs, 0, configs.length, numConfigs, 0))
        throw new RuntimeException(GLUtils.getEGLErrorString(EGL14.eglGetError()));
      int err = EGL14.eglGetError();
      if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err));
      // Create an OpenGL ES 2 context and a window surface over outputSurface.
      int[] ctxAttribs = new int[] {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
      context = EGL14.eglCreateContext(display, configs[0], EGL14.EGL_NO_CONTEXT, ctxAttribs, 0);
      err = EGL14.eglGetError();
      if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err));
      int[] surfaceAttribs = new int[] {EGL14.EGL_NONE};
      surface = EGL14.eglCreateWindowSurface(display, configs[0], outputSurface, surfaceAttribs, 0);
      err = EGL14.eglGetError();
      if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err));
      if (!EGL14.eglMakeCurrent(display, surface, surface, context))
        throw new RuntimeException(
            "eglMakeCurrent(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()));
      // Stage vertex and index data in direct, native-ordered buffers.
      ByteBuffer vertexBuffer = ByteBuffer.allocateDirect(vertices.length * 4);
      vertexBuffer.order(ByteOrder.nativeOrder());
      vertexBuffer.asFloatBuffer().put(vertices);
      vertexBuffer.asFloatBuffer().position(0);
      ByteBuffer indexBuffer = ByteBuffer.allocateDirect(indices.length * 4);
      indexBuffer.order(ByteOrder.nativeOrder());
      indexBuffer.asIntBuffer().put(indices);
      indexBuffer.position(0);
      // Compile and link the shader program; the individual shader objects can
      // be deleted once linked into the program.
      int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
      int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
      program = GLES20.glCreateProgram();
      GLES20.glAttachShader(program, vertexShader);
      GLES20.glAttachShader(program, fragmentShader);
      GLES20.glLinkProgram(program);
      deleteShader(vertexShader);
      deleteShader(fragmentShader);
      vertexHandle = GLES20.glGetAttribLocation(program, "vertexPosition");
      uvsHandle = GLES20.glGetAttribLocation(program, "uvs");
      texMatrixHandle = GLES20.glGetUniformLocation(program, "texMatrix");
      mvpHandle = GLES20.glGetUniformLocation(program, "mvp");
      // Initialize buffers
      GLES20.glGenBuffers(2, bufferHandles, 0);
      GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0]);
      GLES20.glBufferData(
          GLES20.GL_ARRAY_BUFFER, vertices.length * 4, vertexBuffer, GLES20.GL_DYNAMIC_DRAW);
      GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1]);
      GLES20.glBufferData(
          GLES20.GL_ELEMENT_ARRAY_BUFFER, indices.length * 4, indexBuffer, GLES20.GL_DYNAMIC_DRAW);
      // Init texture that will receive decoded frames
      GLES20.glGenTextures(1, textureHandles, 0);
      GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureHandles[0]);
      inputSurfaceTexture = new SurfaceTexture(getTexId());
      inputSurfaceTexture.setDefaultBufferSize(recordingWidth, recordingHeight);
      // Deliver frame-available callbacks on a dedicated handler thread rather
      // than the render thread, which blocks waiting on the shared flag.
      surfaceTextureFrameAvailableHandler = new HandlerThread("FrameHandlerThread");
      surfaceTextureFrameAvailableHandler.start();
      inputSurface = new Surface(inputSurfaceTexture);
      inputSurfaceTexture.setOnFrameAvailableListener(
          new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
              synchronized (surfaceTextureAvailableFrameLock) {
                if (surfaceTextureFrameAvailable)
                  Log.w(TAG, "Frame available before processing other frames. dropping frames");
                surfaceTextureFrameAvailable = true;
                surfaceTextureAvailableFrameLock.notifyAll();
              }
            }
          },
          new Handler(surfaceTextureFrameAvailableHandler.getLooper()));
      // Wake any caller blocked in getInputSurface(): the input surface exists now.
      lock.notifyAll();
    }
  }
  /** Starts and configures Video Renderer. */
  private void startOpenGL() {
    Log.d(TAG, "Starting OpenGL Thread");
    thread =
        new Thread() {
          @Override
          public void run() {
            configureOpenGL();
            try {
              // Continuously pull frames from input surface texture and use videoRenderer to modify
              // to correct rotation.
              while (!Thread.interrupted()) {
                synchronized (surfaceTextureAvailableFrameLock) {
                  // Bounded wait (500ms) so interruption is noticed promptly.
                  while (!surfaceTextureFrameAvailable) {
                    surfaceTextureAvailableFrameLock.wait(500);
                  }
                  surfaceTextureFrameAvailable = false;
                }
                inputSurfaceTexture.updateTexImage();
                float[] surfaceTextureMatrix = new float[16];
                inputSurfaceTexture.getTransformMatrix(surfaceTextureMatrix);
                draw(recordingWidth, recordingHeight, surfaceTextureMatrix);
              }
            } catch (InterruptedException e) {
              Log.d(TAG, "thread interrupted while waiting for frames");
            }
          }
        };
    thread.setUncaughtExceptionHandler(uncaughtExceptionHandler);
    thread.start();
  }
  /** Returns the handle of the external texture camera frames are written to. */
  public int getTexId() {
    return textureHandles[0];
  }
  /** Builds the rotation matrix applied to each frame around the z axis. */
  @NonNull
  public float[] moveMatrix() {
    float[] m = new float[16];
    Matrix.setIdentityM(m, 0);
    Matrix.rotateM(m, 0, rotation, 0, 0, 1);
    return m;
  }
  /** Sets the rotation, in degrees, applied to subsequently drawn frames. */
  public void setRotation(int rotation) {
    this.rotation = rotation;
  }
  // Compiles a shader of the given type from source and returns its handle.
  private int loadShader(int type, String code) {
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, code);
    GLES20.glCompileShader(shader);
    return shader;
  }
  private void deleteShader(int shader) {
    GLES20.glDeleteShader(shader);
  }
  public void draw(int viewportWidth, int viewportHeight, @NonNull float[] texMatrix) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    GLES20.glClearColor(0f, 0f, 0f, 0f);
    GLES20.glViewport(0, 0, viewportWidth, viewportHeight);
    GLES20.glUseProgram(program);
    // Pass transformations to shader
    GLES20.glUniformMatrix4fv(texMatrixHandle, 1, false, texMatrix, 0);
    GLES20.glUniformMatrix4fv(mvpHandle, 1, false, moveMatrix(), 0);
    // Prepare buffers with vertices and indices & draw
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0]);
    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1]);
    GLES20.glEnableVertexAttribArray(vertexHandle);
    GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 4 * 5, 0);
    GLES20.glEnableVertexAttribArray(uvsHandle);
    GLES20.glVertexAttribPointer(uvsHandle, 2, GLES20.GL_FLOAT, false, 4 * 5, 3 * 4);
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_INT, 0);
    // Stamp the frame with a presentation time (nanoseconds) so muxed video
    // carries correct timestamps.
    EGLExt.eglPresentationTimeANDROID(display, surface, uptimeMillis() * 1000000);
    if (!EGL14.eglSwapBuffers(display, surface)) {
      Log.w(TAG, "eglSwapBuffers() " + GLUtils.getEGLErrorString(EGL14.eglGetError()));
    }
  }
}
| packages/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java/0 | {
"file_path": "packages/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java",
"repo_id": "packages",
"token_count": 5847
} | 966 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugins.camera.features.noisereduction;
import android.annotation.SuppressLint;
import android.hardware.camera2.CaptureRequest;
import android.util.Log;
import androidx.annotation.NonNull;
import io.flutter.BuildConfig;
import io.flutter.plugins.camera.CameraProperties;
import io.flutter.plugins.camera.SdkCapabilityChecker;
import io.flutter.plugins.camera.features.CameraFeature;
import java.util.HashMap;
/**
* This can either be enabled or disabled. Only full capability devices can set this to off. Legacy
* and full support the fast mode.
* https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
*/
public class NoiseReductionFeature extends CameraFeature<NoiseReductionMode> {
  // The requested noise reduction mode. Defaults to fast, which is supported
  // by both legacy- and full-capability devices.
  @NonNull private NoiseReductionMode currentSetting = NoiseReductionMode.fast;
  // Maps plugin-level noise reduction modes to their CaptureRequest constants.
  // Populated once in the constructor. (lowerCamelCase: not a compile-time
  // constant, so SCREAMING_CAPS naming would be misleading.)
  private final HashMap<NoiseReductionMode, Integer> noiseReductionModes = new HashMap<>();
  /**
   * Creates a new instance of the {@link NoiseReductionFeature}.
   *
   * @param cameraProperties Collection of the characteristics for the current camera device.
   */
  public NoiseReductionFeature(@NonNull CameraProperties cameraProperties) {
    super(cameraProperties);
    noiseReductionModes.put(NoiseReductionMode.off, CaptureRequest.NOISE_REDUCTION_MODE_OFF);
    noiseReductionModes.put(NoiseReductionMode.fast, CaptureRequest.NOISE_REDUCTION_MODE_FAST);
    noiseReductionModes.put(
        NoiseReductionMode.highQuality, CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
    // The minimal and zero-shutter-lag modes were added in Android Marshmallow.
    if (SdkCapabilityChecker.supportsMarshmallowNoiseReductionModes()) {
      noiseReductionModes.put(
          NoiseReductionMode.minimal, CaptureRequest.NOISE_REDUCTION_MODE_MINIMAL);
      noiseReductionModes.put(
          NoiseReductionMode.zeroShutterLag, CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG);
    }
  }
  @NonNull
  @Override
  public String getDebugName() {
    return "NoiseReductionFeature";
  }
  @SuppressLint("KotlinPropertyAccess")
  @NonNull
  @Override
  public NoiseReductionMode getValue() {
    return currentSetting;
  }
  @Override
  public void setValue(@NonNull NoiseReductionMode value) {
    this.currentSetting = value;
  }
  @Override
  public boolean checkIsSupported() {
    /*
     * Available settings: public static final int NOISE_REDUCTION_MODE_FAST = 1; public static
     * final int NOISE_REDUCTION_MODE_HIGH_QUALITY = 2; public static final int
     * NOISE_REDUCTION_MODE_MINIMAL = 3; public static final int NOISE_REDUCTION_MODE_OFF = 0;
     * public static final int NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG = 4;
     *
     * <p>Full-capability camera devices will always support OFF and FAST. Camera devices that
     * support YUV_REPROCESSING or PRIVATE_REPROCESSING will support ZERO_SHUTTER_LAG.
     * Legacy-capability camera devices will only support FAST mode.
     */
    // Can be null on some devices.
    int[] modes = cameraProperties.getAvailableNoiseReductionModes();
    /// If there's at least one mode available then we are supported.
    return modes != null && modes.length > 0;
  }
  @Override
  public void updateBuilder(@NonNull CaptureRequest.Builder requestBuilder) {
    if (!checkIsSupported()) {
      return;
    }
    if (BuildConfig.DEBUG) {
      Log.i("Camera", "updateNoiseReduction | currentSetting: " + currentSetting);
    }
    // Apply the currently selected noise reduction mode. (The previous comment
    // incorrectly stated "Always use fast mode".)
    requestBuilder.set(
        CaptureRequest.NOISE_REDUCTION_MODE, noiseReductionModes.get(currentSetting));
  }
}
| packages/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeature.java/0 | {
"file_path": "packages/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/features/noisereduction/NoiseReductionFeature.java",
"repo_id": "packages",
"token_count": 1246
} | 967 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugins.camera.media;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.*;
import android.media.CamcorderProfile;
import android.media.EncoderProfiles;
import android.media.MediaRecorder;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.util.List;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InOrder;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
@RunWith(RobolectricTestRunner.class)
public class MediaRecorderBuilderTest {
  // Verifies that the legacy (CamcorderProfile-based) constructor, used on
  // SDK <= 30, produces a non-null builder.
  @Config(maxSdk = 30)
  @SuppressWarnings("deprecation")
  @Test
  public void ctor_testLegacy() {
    MediaRecorderBuilder builder =
        new MediaRecorderBuilder(CamcorderProfile.get(CamcorderProfile.QUALITY_1080P), "");
    assertNotNull(builder);
  }
  // Verifies that the EncoderProfiles-based constructor, used on SDK >= 31,
  // produces a non-null builder.
  @Config(minSdk = 31)
  @Test
  public void ctor_test() {
    MediaRecorderBuilder builder =
        new MediaRecorderBuilder(CamcorderProfile.getAll("0", CamcorderProfile.QUALITY_1080P), "");
    assertNotNull(builder);
  }
  // Legacy (CamcorderProfile) build path with audio disabled: asserts that no
  // audio source/encoder is configured and that the video settings are applied
  // in the order MediaRecorder requires, ending with prepare().
  @Config(maxSdk = 30)
  @SuppressWarnings("deprecation")
  @Test
  public void build_shouldSetValuesInCorrectOrderWhenAudioIsDisabledLegacy() throws IOException {
    CamcorderProfile recorderProfile = getEmptyCamcorderProfile();
    MediaRecorderBuilder.MediaRecorderFactory mockFactory =
        mock(MediaRecorderBuilder.MediaRecorderFactory.class);
    MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
    String outputFilePath = "mock_video_file_path";
    int mediaOrientation = 1;
    MediaRecorderBuilder builder =
        new MediaRecorderBuilder(recorderProfile, outputFilePath, mockFactory)
            .setEnableAudio(false)
            .setMediaOrientation(mediaOrientation);
    when(mockFactory.makeMediaRecorder()).thenReturn(mockMediaRecorder);
    MediaRecorder recorder = builder.build();
    // InOrder enforces the exact call sequence, not just that calls happened.
    InOrder inOrder = inOrder(recorder);
    inOrder.verify(recorder).setVideoSource(MediaRecorder.VideoSource.SURFACE);
    inOrder.verify(recorder).setOutputFormat(recorderProfile.fileFormat);
    inOrder.verify(recorder).setVideoEncoder(recorderProfile.videoCodec);
    inOrder.verify(recorder).setVideoEncodingBitRate(recorderProfile.videoBitRate);
    inOrder.verify(recorder).setVideoFrameRate(recorderProfile.videoFrameRate);
    inOrder
        .verify(recorder)
        .setVideoSize(recorderProfile.videoFrameWidth, recorderProfile.videoFrameHeight);
    inOrder.verify(recorder).setOutputFile(outputFilePath);
    inOrder.verify(recorder).setOrientationHint(mediaOrientation);
    inOrder.verify(recorder).prepare();
  }
  // EncoderProfiles build path (SDK >= 31) with audio disabled: asserts that no
  // audio configuration occurs and that video settings from the first video
  // profile are applied in the required order, ending with prepare().
  @Config(minSdk = 31)
  @Test
  public void build_shouldSetValuesInCorrectOrderWhenAudioIsDisabled() throws IOException {
    EncoderProfiles recorderProfile = mock(EncoderProfiles.class);
    List<EncoderProfiles.VideoProfile> mockVideoProfiles =
        List.of(getEmptyEncoderProfilesVideoProfile());
    List<EncoderProfiles.AudioProfile> mockAudioProfiles =
        List.of(getEmptyEncoderProfilesAudioProfile());
    MediaRecorderBuilder.MediaRecorderFactory mockFactory =
        mock(MediaRecorderBuilder.MediaRecorderFactory.class);
    MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
    String outputFilePath = "mock_video_file_path";
    int mediaOrientation = 1;
    MediaRecorderBuilder builder =
        new MediaRecorderBuilder(recorderProfile, outputFilePath, mockFactory)
            .setEnableAudio(false)
            .setMediaOrientation(mediaOrientation);
    when(mockFactory.makeMediaRecorder()).thenReturn(mockMediaRecorder);
    when(recorderProfile.getVideoProfiles()).thenReturn(mockVideoProfiles);
    when(recorderProfile.getAudioProfiles()).thenReturn(mockAudioProfiles);
    MediaRecorder recorder = builder.build();
    // The builder is expected to use the first available video profile.
    EncoderProfiles.VideoProfile videoProfile = mockVideoProfiles.get(0);
    InOrder inOrder = inOrder(recorder);
    inOrder.verify(recorder).setVideoSource(MediaRecorder.VideoSource.SURFACE);
    inOrder.verify(recorder).setOutputFormat(recorderProfile.getRecommendedFileFormat());
    inOrder.verify(recorder).setVideoEncoder(videoProfile.getCodec());
    inOrder.verify(recorder).setVideoEncodingBitRate(videoProfile.getBitrate());
    inOrder.verify(recorder).setVideoFrameRate(videoProfile.getFrameRate());
    inOrder.verify(recorder).setVideoSize(videoProfile.getWidth(), videoProfile.getHeight());
    inOrder.verify(recorder).setOutputFile(outputFilePath);
    inOrder.verify(recorder).setOrientationHint(mediaOrientation);
    inOrder.verify(recorder).prepare();
  }
  // The mocked EncoderProfiles returns empty profile lists (Mockito defaults),
  // so building is expected to fail with IndexOutOfBoundsException when the
  // builder accesses the first video profile.
  @Config(minSdk = 31)
  @Test(expected = IndexOutOfBoundsException.class)
  public void build_shouldThrowExceptionWithoutVideoOrAudioProfiles() throws IOException {
    EncoderProfiles recorderProfile = mock(EncoderProfiles.class);
    MediaRecorderBuilder.MediaRecorderFactory mockFactory =
        mock(MediaRecorderBuilder.MediaRecorderFactory.class);
    MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
    String outputFilePath = "mock_video_file_path";
    int mediaOrientation = 1;
    MediaRecorderBuilder builder =
        new MediaRecorderBuilder(recorderProfile, outputFilePath, mockFactory)
            .setEnableAudio(false)
            .setMediaOrientation(mediaOrientation);
    when(mockFactory.makeMediaRecorder()).thenReturn(mockMediaRecorder);
    // The assignment is unused; build() itself is expected to throw.
    MediaRecorder recorder = builder.build();
  }
  // Legacy (CamcorderProfile) build path with audio enabled: asserts that the
  // audio source is set first and that audio and video settings are applied in
  // MediaRecorder's required order, ending with prepare().
  @Config(maxSdk = 30)
  @SuppressWarnings("deprecation")
  @Test
  public void build_shouldSetValuesInCorrectOrderWhenAudioIsEnabledLegacy() throws IOException {
    CamcorderProfile recorderProfile = getEmptyCamcorderProfile();
    MediaRecorderBuilder.MediaRecorderFactory mockFactory =
        mock(MediaRecorderBuilder.MediaRecorderFactory.class);
    MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
    String outputFilePath = "mock_video_file_path";
    int mediaOrientation = 1;
    MediaRecorderBuilder builder =
        new MediaRecorderBuilder(recorderProfile, outputFilePath, mockFactory)
            .setEnableAudio(true)
            .setMediaOrientation(mediaOrientation);
    when(mockFactory.makeMediaRecorder()).thenReturn(mockMediaRecorder);
    MediaRecorder recorder = builder.build();
    InOrder inOrder = inOrder(recorder);
    inOrder.verify(recorder).setAudioSource(MediaRecorder.AudioSource.MIC);
    inOrder.verify(recorder).setVideoSource(MediaRecorder.VideoSource.SURFACE);
    inOrder.verify(recorder).setOutputFormat(recorderProfile.fileFormat);
    inOrder.verify(recorder).setAudioEncoder(recorderProfile.audioCodec);
    inOrder.verify(recorder).setAudioEncodingBitRate(recorderProfile.audioBitRate);
    inOrder.verify(recorder).setAudioSamplingRate(recorderProfile.audioSampleRate);
    inOrder.verify(recorder).setVideoEncoder(recorderProfile.videoCodec);
    inOrder.verify(recorder).setVideoEncodingBitRate(recorderProfile.videoBitRate);
    inOrder.verify(recorder).setVideoFrameRate(recorderProfile.videoFrameRate);
    inOrder
        .verify(recorder)
        .setVideoSize(recorderProfile.videoFrameWidth, recorderProfile.videoFrameHeight);
    inOrder.verify(recorder).setOutputFile(outputFilePath);
    inOrder.verify(recorder).setOrientationHint(mediaOrientation);
    inOrder.verify(recorder).prepare();
  }
// Modern (SDK >= 31) path: recording parameters come from the EncoderProfiles
// API, with separate video and audio profile objects.
@Config(minSdk = 31)
@Test
public void build_shouldSetValuesInCorrectOrderWhenAudioIsEnabled() throws IOException {
  EncoderProfiles recorderProfile = mock(EncoderProfiles.class);
  List<EncoderProfiles.VideoProfile> mockVideoProfiles =
      List.of(getEmptyEncoderProfilesVideoProfile());
  List<EncoderProfiles.AudioProfile> mockAudioProfiles =
      List.of(getEmptyEncoderProfilesAudioProfile());
  MediaRecorderBuilder.MediaRecorderFactory mockFactory =
      mock(MediaRecorderBuilder.MediaRecorderFactory.class);
  MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
  String outputFilePath = "mock_video_file_path";
  int mediaOrientation = 1;
  MediaRecorderBuilder builder =
      new MediaRecorderBuilder(recorderProfile, outputFilePath, mockFactory)
          .setEnableAudio(true)
          .setMediaOrientation(mediaOrientation);

  when(mockFactory.makeMediaRecorder()).thenReturn(mockMediaRecorder);
  when(recorderProfile.getVideoProfiles()).thenReturn(mockVideoProfiles);
  when(recorderProfile.getAudioProfiles()).thenReturn(mockAudioProfiles);

  MediaRecorder recorder = builder.build();

  // The builder reads the first video and audio profile from the
  // EncoderProfiles lists (see the stubbed lists above).
  EncoderProfiles.VideoProfile videoProfile = mockVideoProfiles.get(0);
  EncoderProfiles.AudioProfile audioProfile = mockAudioProfiles.get(0);

  // Order-sensitive verification of the recorder configuration sequence;
  // prepare() must come last.
  InOrder inOrder = inOrder(recorder);
  inOrder.verify(recorder).setAudioSource(MediaRecorder.AudioSource.MIC);
  inOrder.verify(recorder).setVideoSource(MediaRecorder.VideoSource.SURFACE);
  inOrder.verify(recorder).setOutputFormat(recorderProfile.getRecommendedFileFormat());
  inOrder.verify(recorder).setAudioEncoder(audioProfile.getCodec());
  inOrder.verify(recorder).setAudioEncodingBitRate(audioProfile.getBitrate());
  inOrder.verify(recorder).setAudioSamplingRate(audioProfile.getSampleRate());
  inOrder.verify(recorder).setVideoEncoder(videoProfile.getCodec());
  inOrder.verify(recorder).setVideoEncodingBitRate(videoProfile.getBitrate());
  inOrder.verify(recorder).setVideoFrameRate(videoProfile.getFrameRate());
  inOrder.verify(recorder).setVideoSize(videoProfile.getWidth(), videoProfile.getHeight());
  inOrder.verify(recorder).setOutputFile(outputFilePath);
  inOrder.verify(recorder).setOrientationHint(mediaOrientation);
  inOrder.verify(recorder).prepare();
}
/**
 * Reflectively constructs a CamcorderProfile with all-zero fields for use as a test fixture.
 *
 * <p>The class has no public constructor, so the package-private one is invoked via reflection.
 */
private CamcorderProfile getEmptyCamcorderProfile() {
  try {
    Constructor<CamcorderProfile> constructor =
        CamcorderProfile.class.getDeclaredConstructor(
            int.class, int.class, int.class, int.class, int.class, int.class, int.class,
            int.class, int.class, int.class, int.class, int.class);
    constructor.setAccessible(true);
    return constructor.newInstance(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
  } catch (Exception e) {
    // Fail fast with the real cause; returning null here would only surface
    // later as a confusing NullPointerException inside a test body.
    throw new AssertionError("Unable to reflectively create an empty CamcorderProfile", e);
  }
}
/**
 * Reflectively constructs an EncoderProfiles.VideoProfile with all-zero fields for use as a
 * test fixture, since the class exposes no public constructor.
 */
private EncoderProfiles.VideoProfile getEmptyEncoderProfilesVideoProfile() {
  try {
    Constructor<EncoderProfiles.VideoProfile> constructor =
        EncoderProfiles.VideoProfile.class.getDeclaredConstructor(
            int.class, int.class, int.class, int.class, int.class, int.class);
    constructor.setAccessible(true);
    return constructor.newInstance(0, 0, 0, 0, 0, 0);
  } catch (Exception e) {
    // Fail fast with the real cause instead of returning null, which would
    // surface later as a confusing NullPointerException.
    throw new AssertionError(
        "Unable to reflectively create an empty EncoderProfiles.VideoProfile", e);
  }
}
/**
 * Reflectively constructs an EncoderProfiles.AudioProfile with all-zero fields for use as a
 * test fixture, since the class exposes no public constructor.
 */
private EncoderProfiles.AudioProfile getEmptyEncoderProfilesAudioProfile() {
  try {
    Constructor<EncoderProfiles.AudioProfile> constructor =
        EncoderProfiles.AudioProfile.class.getDeclaredConstructor(
            int.class, int.class, int.class, int.class, int.class);
    constructor.setAccessible(true);
    return constructor.newInstance(0, 0, 0, 0, 0);
  } catch (Exception e) {
    // Fail fast with the real cause instead of returning null, which would
    // surface later as a confusing NullPointerException.
    throw new AssertionError(
        "Unable to reflectively create an empty EncoderProfiles.AudioProfile", e);
  }
}
}
| packages/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/media/MediaRecorderBuilderTest.java/0 | {
"file_path": "packages/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/media/MediaRecorderBuilderTest.java",
"repo_id": "packages",
"token_count": 3859
} | 968 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:flutter/services.dart';
/// Parses a string into a corresponding [CameraLensDirection].
///
/// Throws an [ArgumentError] if [string] is not one of 'front', 'back', or
/// 'external'.
CameraLensDirection parseCameraLensDirection(String string) {
  switch (string) {
    case 'front':
      return CameraLensDirection.front;
    case 'back':
      return CameraLensDirection.back;
    case 'external':
      return CameraLensDirection.external;
  }
  // Include the offending value in the message, consistent with
  // deserializeDeviceOrientation below.
  throw ArgumentError('"$string" is not a valid CameraLensDirection value');
}
/// Returns the device orientation as a String.
///
/// Maps each [DeviceOrientation] value to the string form expected by the
/// platform channel (e.g. 'portraitUp'). Falls back to 'portraitUp' for any
/// enum value added in a future version of the framework; see the note below.
String serializeDeviceOrientation(DeviceOrientation orientation) {
  switch (orientation) {
    case DeviceOrientation.portraitUp:
      return 'portraitUp';
    case DeviceOrientation.portraitDown:
      return 'portraitDown';
    case DeviceOrientation.landscapeRight:
      return 'landscapeRight';
    case DeviceOrientation.landscapeLeft:
      return 'landscapeLeft';
  }
  // The enum comes from a different package, which could get a new value at
  // any time, so provide a fallback that ensures this won't break when used
  // with a version that contains new values. This is deliberately outside
  // the switch rather than a `default` so that the linter will flag the
  // switch as needing an update.
  // ignore: dead_code
  return 'portraitUp';
}
/// Returns the device orientation for a given String.
///
/// Throws an [ArgumentError] if [str] does not name a known
/// [DeviceOrientation] value.
DeviceOrientation deserializeDeviceOrientation(String str) {
  // Lookup table mirroring serializeDeviceOrientation.
  const Map<String, DeviceOrientation> mapping = <String, DeviceOrientation>{
    'portraitUp': DeviceOrientation.portraitUp,
    'portraitDown': DeviceOrientation.portraitDown,
    'landscapeRight': DeviceOrientation.landscapeRight,
    'landscapeLeft': DeviceOrientation.landscapeLeft,
  };
  final DeviceOrientation? orientation = mapping[str];
  if (orientation == null) {
    throw ArgumentError('"$str" is not a valid DeviceOrientation value');
  }
  return orientation;
}
| packages/packages/camera/camera_android/lib/src/utils.dart/0 | {
"file_path": "packages/packages/camera/camera_android/lib/src/utils.dart",
"repo_id": "packages",
"token_count": 611
} | 969 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugins.camerax;
import androidx.annotation.NonNull;
import androidx.annotation.VisibleForTesting;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageProxy;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugins.camerax.GeneratedCameraXLibrary.AnalyzerHostApi;
/**
 * Host API implementation for {@link ImageAnalysis.Analyzer}.
 *
 * <p>This class may handle instantiating and adding native object instances that are attached to a
 * Dart instance or handle method calls on the associated native class or an instance of the class.
 */
public class AnalyzerHostApiImpl implements AnalyzerHostApi {
  private final BinaryMessenger binaryMessenger;
  private final InstanceManager instanceManager;

  private final AnalyzerProxy proxy;

  /** Proxy for constructor of {@link ImageAnalysis.Analyzer}. */
  @VisibleForTesting
  public static class AnalyzerProxy {

    /** Creates an instance of {@link AnalyzerImpl}. */
    @NonNull
    public AnalyzerImpl create(
        @NonNull BinaryMessenger binaryMessenger, @NonNull InstanceManager instanceManager) {
      return new AnalyzerImpl(binaryMessenger, instanceManager);
    }
  }

  /**
   * Implementation of {@link ImageAnalysis.Analyzer} that passes arguments of callback methods to
   * Dart.
   */
  public static class AnalyzerImpl implements ImageAnalysis.Analyzer {
    // Final for consistency with the enclosing class: both collaborators are
    // fixed for the lifetime of the analyzer.
    private final BinaryMessenger binaryMessenger;
    private final InstanceManager instanceManager;

    // Reassignable only via setApi for testing.
    private AnalyzerFlutterApiImpl api;

    @VisibleForTesting @NonNull public ImageProxyFlutterApiImpl imageProxyApi;

    /**
     * Constructs an instance of {@link ImageAnalysis.Analyzer} that passes arguments of callbacks
     * methods to Dart.
     */
    public AnalyzerImpl(
        @NonNull BinaryMessenger binaryMessenger, @NonNull InstanceManager instanceManager) {
      super();
      this.binaryMessenger = binaryMessenger;
      this.instanceManager = instanceManager;
      api = new AnalyzerFlutterApiImpl(binaryMessenger, instanceManager);
      imageProxyApi = new ImageProxyFlutterApiImpl(binaryMessenger, instanceManager);
    }

    @Override
    public void analyze(@NonNull ImageProxy imageProxy) {
      Long imageFormat = Long.valueOf(imageProxy.getFormat());
      Long imageHeight = Long.valueOf(imageProxy.getHeight());
      Long imageWidth = Long.valueOf(imageProxy.getWidth());

      // Create the Dart-side ImageProxy first, then notify Dart of the
      // analyze call that references it.
      imageProxyApi.create(imageProxy, imageFormat, imageHeight, imageWidth, reply -> {});
      api.analyze(this, imageProxy, reply -> {});
    }

    /**
     * Flutter API used to send messages back to Dart.
     *
     * <p>This is only visible for testing.
     */
    @VisibleForTesting
    void setApi(@NonNull AnalyzerFlutterApiImpl api) {
      this.api = api;
    }
  }

  /**
   * Constructs a {@link AnalyzerHostApiImpl}.
   *
   * @param binaryMessenger used to communicate with Dart over asynchronous messages
   * @param instanceManager maintains instances stored to communicate with attached Dart objects
   */
  public AnalyzerHostApiImpl(
      @NonNull BinaryMessenger binaryMessenger, @NonNull InstanceManager instanceManager) {
    this(binaryMessenger, instanceManager, new AnalyzerProxy());
  }

  /**
   * Constructs a {@link AnalyzerHostApiImpl}.
   *
   * @param binaryMessenger used to communicate with Dart over asynchronous messages
   * @param instanceManager maintains instances stored to communicate with attached Dart objects
   * @param proxy proxy for constructor of {@link ImageAnalysis.Analyzer}
   */
  @VisibleForTesting
  AnalyzerHostApiImpl(
      @NonNull BinaryMessenger binaryMessenger,
      @NonNull InstanceManager instanceManager,
      @NonNull AnalyzerProxy proxy) {
    this.binaryMessenger = binaryMessenger;
    this.instanceManager = instanceManager;
    this.proxy = proxy;
  }

  /**
   * Creates an {@link AnalyzerProxy} that represents an {@link ImageAnalysis.Analyzer} instance
   * with the specified identifier.
   */
  @Override
  public void create(@NonNull Long identifier) {
    instanceManager.addDartCreatedInstance(
        proxy.create(binaryMessenger, instanceManager), identifier);
  }
}
| packages/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/AnalyzerHostApiImpl.java/0 | {
"file_path": "packages/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/AnalyzerHostApiImpl.java",
"repo_id": "packages",
"token_count": 1309
} | 970 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugins.camerax;
import android.hardware.camera2.CaptureRequest;
import androidx.annotation.NonNull;
import androidx.annotation.OptIn;
import androidx.annotation.VisibleForTesting;
import androidx.camera.camera2.interop.CaptureRequestOptions;
import io.flutter.plugins.camerax.GeneratedCameraXLibrary.CaptureRequestKeySupportedType;
import io.flutter.plugins.camerax.GeneratedCameraXLibrary.CaptureRequestOptionsHostApi;
import java.util.HashMap;
import java.util.Map;
/**
 * Host API implementation for {@link CaptureRequestOptions}.
 *
 * <p>This class may handle instantiating and adding native object instances that are attached to a
 * Dart instance or handle method calls on the associated native class or an instance of the class.
 */
public class CaptureRequestOptionsHostApiImpl implements CaptureRequestOptionsHostApi {
  private final InstanceManager instanceManager;

  private final CaptureRequestOptionsProxy proxy;

  /** Proxy for constructor of {@link CaptureRequestOptions}. */
  @VisibleForTesting
  public static class CaptureRequestOptionsProxy {

    /**
     * Creates an instance of {@link CaptureRequestOptions}.
     *
     * <p>A {@code null} option value clears any previously set value for that key.
     */
    // Suppression is safe because the type shared between the key and value pairs that
    // represent capture request options is checked on the Dart side.
    @SuppressWarnings("unchecked")
    @OptIn(markerClass = androidx.camera.camera2.interop.ExperimentalCamera2Interop.class)
    public @NonNull CaptureRequestOptions create(
        @NonNull Map<CaptureRequestKeySupportedType, Object> options) {
      CaptureRequestOptions.Builder builder = getCaptureRequestOptionsBuilder();

      for (Map.Entry<CaptureRequestKeySupportedType, Object> option : options.entrySet()) {
        CaptureRequestKeySupportedType optionKeyType = option.getKey();
        CaptureRequest.Key<? extends Object> optionKey = getCaptureRequestKey(optionKeyType);
        Object optionValue = option.getValue();

        if (optionValue == null) {
          builder.clearCaptureRequestOption(optionKey);
          continue;
        }

        switch (optionKeyType) {
          case CONTROL_AE_LOCK:
            builder.setCaptureRequestOption(
                (CaptureRequest.Key<Boolean>) optionKey, (Boolean) optionValue);
            break;
          default:
            // Bug fix: the message previously lacked a space before "is",
            // producing e.g. "key CONTROL_AE_LOCKis not currently supported".
            throw new IllegalArgumentException(
                "The capture request key "
                    + optionKeyType.toString()
                    + " is not currently supported by the plugin.");
        }
      }

      return builder.build();
    }

    /** Maps a supported key type to the corresponding {@link CaptureRequest.Key}. */
    private CaptureRequest.Key<? extends Object> getCaptureRequestKey(
        CaptureRequestKeySupportedType type) {
      CaptureRequest.Key<? extends Object> key;
      switch (type) {
        case CONTROL_AE_LOCK:
          key = CaptureRequest.CONTROL_AE_LOCK;
          break;
        default:
          throw new IllegalArgumentException(
              "The capture request key is not currently supported by the plugin.");
      }
      return key;
    }

    @VisibleForTesting
    @OptIn(markerClass = androidx.camera.camera2.interop.ExperimentalCamera2Interop.class)
    public @NonNull CaptureRequestOptions.Builder getCaptureRequestOptionsBuilder() {
      return new CaptureRequestOptions.Builder();
    }
  }

  /**
   * Constructs a {@link CaptureRequestOptionsHostApiImpl}.
   *
   * @param instanceManager maintains instances stored to communicate with attached Dart objects
   */
  public CaptureRequestOptionsHostApiImpl(@NonNull InstanceManager instanceManager) {
    this(instanceManager, new CaptureRequestOptionsProxy());
  }

  /**
   * Constructs a {@link CaptureRequestOptionsHostApiImpl}.
   *
   * @param instanceManager maintains instances stored to communicate with attached Dart objects
   * @param proxy proxy for constructor of {@link CaptureRequestOptions}
   */
  @VisibleForTesting
  CaptureRequestOptionsHostApiImpl(
      @NonNull InstanceManager instanceManager, @NonNull CaptureRequestOptionsProxy proxy) {
    this.instanceManager = instanceManager;
    this.proxy = proxy;
  }

  /**
   * Creates a {@link CaptureRequestOptions} instance from {@code options}, whose keys are the
   * ordinals of {@link CaptureRequestKeySupportedType} values.
   */
  @Override
  public void create(@NonNull Long identifier, @NonNull Map<Long, Object> options) {
    Map<CaptureRequestKeySupportedType, Object> decodedOptions =
        new HashMap<CaptureRequestKeySupportedType, Object>();
    for (Map.Entry<Long, Object> option : options.entrySet()) {
      Integer index = ((Number) option.getKey()).intValue();
      decodedOptions.put(CaptureRequestKeySupportedType.values()[index], option.getValue());
    }
    instanceManager.addDartCreatedInstance(proxy.create(decodedOptions), identifier);
  }
}
| packages/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CaptureRequestOptionsHostApiImpl.java/0 | {
"file_path": "packages/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/CaptureRequestOptionsHostApiImpl.java",
"repo_id": "packages",
"token_count": 1529
} | 971 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugins.camerax;
import androidx.annotation.NonNull;
import androidx.annotation.VisibleForTesting;
import androidx.lifecycle.LiveData;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugins.camerax.GeneratedCameraXLibrary.LiveDataFlutterApi;
import io.flutter.plugins.camerax.GeneratedCameraXLibrary.LiveDataSupportedType;
import io.flutter.plugins.camerax.GeneratedCameraXLibrary.LiveDataSupportedTypeData;
/**
 * Flutter API implementation for {@link LiveData}.
 *
 * <p>This class may handle adding native instances that are attached to a Dart instance or passing
 * arguments of callbacks methods to a Dart instance.
 */
public class LiveDataFlutterApiWrapper {
  private final BinaryMessenger binaryMessenger;
  private final InstanceManager instanceManager;

  private LiveDataFlutterApi liveDataFlutterApi;

  /**
   * Constructs a {@link LiveDataFlutterApiWrapper}.
   *
   * @param binaryMessenger used to communicate with Dart over asynchronous messages
   * @param instanceManager maintains instances stored to communicate with attached Dart objects
   */
  public LiveDataFlutterApiWrapper(
      @NonNull BinaryMessenger binaryMessenger, @NonNull InstanceManager instanceManager) {
    this.binaryMessenger = binaryMessenger;
    this.instanceManager = instanceManager;
    liveDataFlutterApi = new LiveDataFlutterApi(binaryMessenger);
  }

  /**
   * Stores the {@link LiveData} instance and notifies Dart to create and store a new {@link
   * LiveData} instance that is attached to this one. If {@code instance} has already been added,
   * this method does nothing.
   */
  public void create(
      @NonNull LiveData<?> instance,
      @NonNull LiveDataSupportedType type,
      @NonNull LiveDataFlutterApi.Reply<Void> callback) {
    if (instanceManager.containsInstance(instance)) {
      // Already tracked; the attached Dart instance exists, so there is nothing to do.
      return;
    }
    final long identifier = instanceManager.addHostCreatedInstance(instance);
    liveDataFlutterApi.create(
        identifier, new LiveDataSupportedTypeData.Builder().setValue(type).build(), callback);
  }

  /** Sets the Flutter API used to send messages to Dart. */
  @VisibleForTesting
  void setApi(@NonNull LiveDataFlutterApi api) {
    this.liveDataFlutterApi = api;
  }
}
| packages/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/LiveDataFlutterApiWrapper.java/0 | {
"file_path": "packages/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/LiveDataFlutterApiWrapper.java",
"repo_id": "packages",
"token_count": 725
} | 972 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugins.camerax;
import androidx.annotation.NonNull;
import androidx.camera.video.Recording;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugins.camerax.GeneratedCameraXLibrary.RecordingHostApi;
import java.util.Objects;
/**
 * Host API implementation for {@link Recording}.
 *
 * <p>Forwards lifecycle calls (close/pause/resume/stop) made from Dart to the {@link Recording}
 * instance tracked by the {@link InstanceManager}.
 */
public class RecordingHostApiImpl implements RecordingHostApi {
  private final BinaryMessenger binaryMessenger;
  private final InstanceManager instanceManager;

  /** Constructs a {@link RecordingHostApiImpl}. */
  public RecordingHostApiImpl(
      @NonNull BinaryMessenger binaryMessenger, @NonNull InstanceManager instanceManager) {
    this.binaryMessenger = binaryMessenger;
    this.instanceManager = instanceManager;
  }

  @Override
  public void close(@NonNull Long identifier) {
    requireRecording(identifier).close();
  }

  @Override
  public void pause(@NonNull Long identifier) {
    requireRecording(identifier).pause();
  }

  @Override
  public void resume(@NonNull Long identifier) {
    requireRecording(identifier).resume();
  }

  @Override
  public void stop(@NonNull Long identifier) {
    requireRecording(identifier).stop();
  }

  /** Looks up the {@link Recording} for {@code identifier}, failing fast if it is not tracked. */
  private Recording requireRecording(Long identifier) {
    return (Recording) Objects.requireNonNull(instanceManager.getInstance(identifier));
  }
}
| packages/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/RecordingHostApiImpl.java/0 | {
"file_path": "packages/packages/camera/camera_android_camerax/android/src/main/java/io/flutter/plugins/camerax/RecordingHostApiImpl.java",
"repo_id": "packages",
"token_count": 455
} | 973 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugins.camerax;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.verify;
import androidx.camera.core.CameraState;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugins.camerax.GeneratedCameraXLibrary.CameraStateErrorFlutterApi;
import java.util.Objects;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnit;
import org.mockito.junit.MockitoRule;
/** Tests for {@link CameraStateErrorFlutterApiWrapper}. */
public class CameraStateErrorTest {
  @Rule public MockitoRule mockitoRule = MockitoJUnit.rule();

  @Mock public CameraState.StateError mockCameraStateError;
  @Mock public BinaryMessenger mockBinaryMessenger;
  @Mock public CameraStateErrorFlutterApi mockFlutterApi;

  InstanceManager instanceManager;

  @Before
  public void setUp() {
    instanceManager = InstanceManager.create(identifier -> {});
  }

  @After
  public void tearDown() {
    instanceManager.stopFinalizationListener();
  }

  @Test
  public void flutterApiCreate_makesCallToDartToCreateInstance() {
    // Arrange.
    final Long errorCode = 0L;
    final CameraStateErrorFlutterApiWrapper flutterApiWrapper =
        new CameraStateErrorFlutterApiWrapper(mockBinaryMessenger, instanceManager);
    flutterApiWrapper.setApi(mockFlutterApi);

    // Act.
    flutterApiWrapper.create(mockCameraStateError, errorCode, reply -> {});

    // Assert: the wrapper registered the native instance and told Dart about it.
    final long instanceIdentifier =
        Objects.requireNonNull(
            instanceManager.getIdentifierForStrongReference(mockCameraStateError));
    verify(mockFlutterApi).create(eq(instanceIdentifier), eq(errorCode), any());
  }
}
| packages/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraStateErrorTest.java/0 | {
"file_path": "packages/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/CameraStateErrorTest.java",
"repo_id": "packages",
"token_count": 605
} | 974 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugins.camerax;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import android.app.Activity;
import android.content.Context;
import android.view.Display;
import android.view.WindowManager;
import androidx.camera.core.CameraInfo;
import androidx.camera.core.DisplayOrientedMeteringPointFactory;
import androidx.camera.core.MeteringPoint;
import androidx.camera.core.MeteringPointFactory;
import io.flutter.plugin.common.BinaryMessenger;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockedStatic;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnit;
import org.mockito.junit.MockitoRule;
import org.mockito.stubbing.Answer;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
/** Robolectric tests for {@code MeteringPointHostApiImpl}. */
@RunWith(RobolectricTestRunner.class)
public class MeteringPointTest {
  @Rule public MockitoRule mockitoRule = MockitoJUnit.rule();

  @Mock public BinaryMessenger mockBinaryMessenger;
  @Mock public MeteringPoint meteringPoint;

  InstanceManager testInstanceManager;

  @Before
  public void setUp() {
    testInstanceManager = InstanceManager.create(identifier -> {});
  }

  @After
  public void tearDown() {
    testInstanceManager.stopFinalizationListener();
  }

  // SDK >= 30 path: the Display is retrieved via Activity#getDisplay().
  @Test
  @Config(sdk = 30)
  public void hostApiCreate_createsExpectedMeteringPointWithSizeSpecified_AboveAndroid30() {
    final MeteringPointHostApiImpl.MeteringPointProxy proxySpy =
        spy(new MeteringPointHostApiImpl.MeteringPointProxy());
    final MeteringPointHostApiImpl hostApi =
        new MeteringPointHostApiImpl(testInstanceManager, proxySpy);
    final Long meteringPointIdentifier = 78L;
    final Float x = 0.25f;
    final Float y = 0.18f;
    final Float size = 0.6f;
    final Float surfaceWidth = 1f;
    final Float surfaceHeight = 1f;
    final DisplayOrientedMeteringPointFactory mockDisplayOrientedMeteringPointFactory =
        mock(DisplayOrientedMeteringPointFactory.class);
    final Activity mockActivity = mock(Activity.class);
    final Display mockDisplay = mock(Display.class);
    final CameraInfo mockCameraInfo = mock(CameraInfo.class);
    final long mockCameraInfoId = 55L;

    hostApi.setActivity(mockActivity);
    testInstanceManager.addDartCreatedInstance(mockCameraInfo, mockCameraInfoId);
    when(mockActivity.getDisplay()).thenReturn(mockDisplay);
    when(proxySpy.getDisplayOrientedMeteringPointFactory(
            mockDisplay, mockCameraInfo, surfaceWidth, surfaceHeight))
        .thenReturn(mockDisplayOrientedMeteringPointFactory);
    when(mockDisplayOrientedMeteringPointFactory.createPoint(x, y, size)).thenReturn(meteringPoint);

    hostApi.create(
        meteringPointIdentifier,
        x.doubleValue(),
        y.doubleValue(),
        size.doubleValue(),
        mockCameraInfoId);

    // The three-argument createPoint overload is used because a size was specified.
    verify(mockDisplayOrientedMeteringPointFactory).createPoint(x, y, size);
    assertEquals(testInstanceManager.getInstance(meteringPointIdentifier), meteringPoint);
  }

  // SDK < 30 path: the Display comes from WindowManager#getDefaultDisplay(),
  // which is deprecated (hence the suppression).
  @Test
  @Config(sdk = 29)
  @SuppressWarnings("deprecation")
  public void hostApiCreate_createsExpectedMeteringPointWithSizeSpecified_BelowAndroid30() {
    final MeteringPointHostApiImpl.MeteringPointProxy proxySpy =
        spy(new MeteringPointHostApiImpl.MeteringPointProxy());
    final MeteringPointHostApiImpl hostApi =
        new MeteringPointHostApiImpl(testInstanceManager, proxySpy);
    final Long meteringPointIdentifier = 78L;
    final Float x = 0.3f;
    final Float y = 0.2f;
    final Float size = 6f;
    final Float surfaceWidth = 1f;
    final Float surfaceHeight = 1f;
    final DisplayOrientedMeteringPointFactory mockDisplayOrientedMeteringPointFactory =
        mock(DisplayOrientedMeteringPointFactory.class);
    final Activity mockActivity = mock(Activity.class);
    final WindowManager mockWindowManager = mock(WindowManager.class);
    final Display mockDisplay = mock(Display.class);
    final CameraInfo mockCameraInfo = mock(CameraInfo.class);
    final long mockCameraInfoId = 5L;

    hostApi.setActivity(mockActivity);
    testInstanceManager.addDartCreatedInstance(mockCameraInfo, mockCameraInfoId);
    when(mockActivity.getSystemService(Context.WINDOW_SERVICE)).thenReturn(mockWindowManager);
    when(mockWindowManager.getDefaultDisplay()).thenReturn(mockDisplay);
    when(proxySpy.getDisplayOrientedMeteringPointFactory(
            mockDisplay, mockCameraInfo, surfaceWidth, surfaceHeight))
        .thenReturn(mockDisplayOrientedMeteringPointFactory);
    when(mockDisplayOrientedMeteringPointFactory.createPoint(x, y, size)).thenReturn(meteringPoint);

    hostApi.create(
        meteringPointIdentifier,
        x.doubleValue(),
        y.doubleValue(),
        size.doubleValue(),
        mockCameraInfoId);

    verify(mockDisplayOrientedMeteringPointFactory).createPoint(x, y, size);
    assertEquals(testInstanceManager.getInstance(meteringPointIdentifier), meteringPoint);
  }

  // As above (SDK >= 30), but with a null size: the two-argument createPoint
  // overload must be used.
  @Test
  @Config(sdk = 30)
  public void hostApiCreate_createsExpectedMeteringPointWithoutSizeSpecified_AboveAndroid30() {
    final MeteringPointHostApiImpl.MeteringPointProxy proxySpy =
        spy(new MeteringPointHostApiImpl.MeteringPointProxy());
    final MeteringPointHostApiImpl hostApi =
        new MeteringPointHostApiImpl(testInstanceManager, proxySpy);
    final Long meteringPointIdentifier = 78L;
    final Float x = 0.23f;
    final Float y = 0.32f;
    final Float surfaceWidth = 1f;
    final Float surfaceHeight = 1f;
    final DisplayOrientedMeteringPointFactory mockDisplayOrientedMeteringPointFactory =
        mock(DisplayOrientedMeteringPointFactory.class);
    final Activity mockActivity = mock(Activity.class);
    final Display mockDisplay = mock(Display.class);
    final CameraInfo mockCameraInfo = mock(CameraInfo.class);
    final long mockCameraInfoId = 6L;

    hostApi.setActivity(mockActivity);
    testInstanceManager.addDartCreatedInstance(mockCameraInfo, mockCameraInfoId);
    when(mockActivity.getDisplay()).thenReturn(mockDisplay);
    when(proxySpy.getDisplayOrientedMeteringPointFactory(
            mockDisplay, mockCameraInfo, surfaceWidth, surfaceHeight))
        .thenReturn(mockDisplayOrientedMeteringPointFactory);
    when(mockDisplayOrientedMeteringPointFactory.createPoint(x, y)).thenReturn(meteringPoint);

    hostApi.create(
        meteringPointIdentifier, x.doubleValue(), y.doubleValue(), null, mockCameraInfoId);

    verify(mockDisplayOrientedMeteringPointFactory).createPoint(x, y);
    assertEquals(testInstanceManager.getInstance(meteringPointIdentifier), meteringPoint);
  }

  // As above (SDK < 30), with a null size.
  @Test
  @Config(sdk = 29)
  @SuppressWarnings("deprecation")
  public void hostApiCreate_createsExpectedMeteringPointWithoutSizeSpecified_BelowAndroid30() {
    final MeteringPointHostApiImpl.MeteringPointProxy proxySpy =
        spy(new MeteringPointHostApiImpl.MeteringPointProxy());
    final MeteringPointHostApiImpl hostApi =
        new MeteringPointHostApiImpl(testInstanceManager, proxySpy);
    final Long meteringPointIdentifier = 78L;
    final Float x = 0.1f;
    final Float y = 0.8f;
    final Float surfaceWidth = 1f;
    final Float surfaceHeight = 1f;
    final DisplayOrientedMeteringPointFactory mockDisplayOrientedMeteringPointFactory =
        mock(DisplayOrientedMeteringPointFactory.class);
    final Activity mockActivity = mock(Activity.class);
    final WindowManager mockWindowManager = mock(WindowManager.class);
    final Display mockDisplay = mock(Display.class);
    final CameraInfo mockCameraInfo = mock(CameraInfo.class);
    final long mockCameraInfoId = 7L;

    hostApi.setActivity(mockActivity);
    testInstanceManager.addDartCreatedInstance(mockCameraInfo, mockCameraInfoId);
    when(mockActivity.getSystemService(Context.WINDOW_SERVICE)).thenReturn(mockWindowManager);
    when(mockWindowManager.getDefaultDisplay()).thenReturn(mockDisplay);
    when(proxySpy.getDisplayOrientedMeteringPointFactory(
            mockDisplay, mockCameraInfo, surfaceWidth, surfaceHeight))
        .thenReturn(mockDisplayOrientedMeteringPointFactory);
    when(mockDisplayOrientedMeteringPointFactory.createPoint(x, y)).thenReturn(meteringPoint);

    hostApi.create(
        meteringPointIdentifier, x.doubleValue(), y.doubleValue(), null, mockCameraInfoId);

    verify(mockDisplayOrientedMeteringPointFactory).createPoint(x, y);
    assertEquals(testInstanceManager.getInstance(meteringPointIdentifier), meteringPoint);
  }

  // getDefaultPointSize delegates to the static MeteringPointFactory method,
  // which is mocked statically here.
  @Test
  public void getDefaultPointSize_returnsExpectedSize() {
    try (MockedStatic<MeteringPointFactory> mockedMeteringPointFactory =
        Mockito.mockStatic(MeteringPointFactory.class)) {
      final MeteringPointHostApiImpl meteringPointHostApiImpl =
          new MeteringPointHostApiImpl(testInstanceManager);
      final Long meteringPointIdentifier = 93L;
      // NOTE(review): unused local; kept as-is for byte-identical behavior.
      final Long index = 2L;
      final Double defaultPointSize = 4D;

      testInstanceManager.addDartCreatedInstance(meteringPoint, meteringPointIdentifier);
      mockedMeteringPointFactory
          .when(() -> MeteringPointFactory.getDefaultPointSize())
          .thenAnswer((Answer<Float>) invocation -> defaultPointSize.floatValue());

      assertEquals(meteringPointHostApiImpl.getDefaultPointSize(), defaultPointSize);
      mockedMeteringPointFactory.verify(() -> MeteringPointFactory.getDefaultPointSize());
    }
  }
}
| packages/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/MeteringPointTest.java/0 | {
"file_path": "packages/packages/camera/camera_android_camerax/android/src/test/java/io/flutter/plugins/camerax/MeteringPointTest.java",
"repo_id": "packages",
"token_count": 3273
} | 975 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:math' show Point;
import 'package:async/async.dart';
import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:flutter/services.dart'
show DeviceOrientation, PlatformException;
import 'package:flutter/widgets.dart';
import 'package:stream_transform/stream_transform.dart';
import 'analyzer.dart';
import 'camera.dart';
import 'camera2_camera_control.dart';
import 'camera_control.dart';
import 'camera_info.dart';
import 'camera_selector.dart';
import 'camera_state.dart';
import 'camerax_library.g.dart';
import 'camerax_proxy.dart';
import 'capture_request_options.dart';
import 'device_orientation_manager.dart';
import 'exposure_state.dart';
import 'fallback_strategy.dart';
import 'focus_metering_action.dart';
import 'focus_metering_result.dart';
import 'image_analysis.dart';
import 'image_capture.dart';
import 'image_proxy.dart';
import 'live_data.dart';
import 'metering_point.dart';
import 'observer.dart';
import 'pending_recording.dart';
import 'plane_proxy.dart';
import 'preview.dart';
import 'process_camera_provider.dart';
import 'quality_selector.dart';
import 'recorder.dart';
import 'recording.dart';
import 'resolution_selector.dart';
import 'resolution_strategy.dart';
import 'surface.dart';
import 'system_services.dart';
import 'use_case.dart';
import 'video_capture.dart';
import 'zoom_state.dart';
/// The Android implementation of [CameraPlatform] that uses the CameraX library.
class AndroidCameraCameraX extends CameraPlatform {
/// Constructs an [AndroidCameraCameraX].
AndroidCameraCameraX();
  /// Registers this class as the default instance of [CameraPlatform].
  static void registerWith() {
    // Replaces the platform interface's default instance so that the
    // app-facing camera plugin delegates to this CameraX-based implementation.
    CameraPlatform.instance = AndroidCameraCameraX();
  }
/// Proxy for creating `JavaObject`s and calling their methods that require
/// testing.
@visibleForTesting
CameraXProxy proxy = CameraXProxy();
/// The [ProcessCameraProvider] instance used to access camera functionality.
@visibleForTesting
ProcessCameraProvider? processCameraProvider;
/// The [Camera] instance returned by the [processCameraProvider] when a [UseCase] is
/// bound to the lifecycle of the camera it manages.
@visibleForTesting
Camera? camera;
/// The [CameraInfo] instance that corresponds to the [camera] instance.
@visibleForTesting
CameraInfo? cameraInfo;
/// The [CameraControl] instance that corresponds to the [camera] instance.
late CameraControl cameraControl;
/// The [LiveData] of the [CameraState] that represents the state of the
/// [camera] instance.
LiveData<CameraState>? liveCameraState;
/// The [Preview] instance that can be configured to present a live camera preview.
@visibleForTesting
Preview? preview;
  /// The [VideoCapture] instance that can be instantiated and configured to
  /// handle video recording.
@visibleForTesting
VideoCapture? videoCapture;
  /// The [Recorder] instance handling the creation of a new [PendingRecording].
@visibleForTesting
Recorder? recorder;
/// The [PendingRecording] instance used to create an active [Recording].
@visibleForTesting
PendingRecording? pendingRecording;
/// The [Recording] instance representing the current recording.
@visibleForTesting
Recording? recording;
/// The path at which the video file will be saved for the current [Recording].
@visibleForTesting
String? videoOutputPath;
/// Whether or not [preview] has been bound to the lifecycle of the camera by
/// [createCamera].
@visibleForTesting
bool previewInitiallyBound = false;
bool _previewIsPaused = false;
/// The prefix used to create the filename for video recording files.
@visibleForTesting
final String videoPrefix = 'MOV';
/// The [ImageCapture] instance that can be configured to capture a still image.
@visibleForTesting
ImageCapture? imageCapture;
/// The flash mode currently configured for [imageCapture].
int? _currentFlashMode;
/// Whether or not torch flash mode has been enabled for the [camera].
@visibleForTesting
bool torchEnabled = false;
/// The [ImageAnalysis] instance that can be configured to analyze individual
/// frames.
ImageAnalysis? imageAnalysis;
/// The [CameraSelector] used to configure the [processCameraProvider] to use
/// the desired camera.
@visibleForTesting
CameraSelector? cameraSelector;
/// The controller we need to broadcast the different camera events.
///
/// It is a `broadcast` because multiple controllers will connect to
/// different stream views of this Controller.
/// This is only exposed for test purposes. It shouldn't be used by clients of
/// the plugin as it may break or change at any time.
@visibleForTesting
final StreamController<CameraEvent> cameraEventStreamController =
StreamController<CameraEvent>.broadcast();
/// The stream of camera events.
Stream<CameraEvent> _cameraEvents(int cameraId) =>
cameraEventStreamController.stream
.where((CameraEvent event) => event.cameraId == cameraId);
/// The controller we need to stream image data.
@visibleForTesting
StreamController<CameraImageData>? cameraImageDataStreamController;
/// Constant representing the multi-plane Android YUV 420 image format.
///
/// See https://developer.android.com/reference/android/graphics/ImageFormat#YUV_420_888.
static const int imageFormatYuv420_888 = 35;
/// Constant representing the compressed JPEG image format.
///
/// See https://developer.android.com/reference/android/graphics/ImageFormat#JPEG.
static const int imageFormatJpeg = 256;
/// Error code indicating a [ZoomState] was requested, but one has not been
/// set for the camera in use.
static const String zoomStateNotSetErrorCode = 'zoomStateNotSet';
/// Whether or not the capture orientation is locked.
///
/// Indicates a new target rotation should not be set as it has been locked by
/// [lockCaptureOrientation].
@visibleForTesting
bool captureOrientationLocked = false;
/// Whether or not the default rotation for [UseCase]s needs to be set
/// manually because the capture orientation was previously locked.
///
/// Currently, CameraX provides no way to unset target rotations for
/// [UseCase]s, so once they are set and unset, this plugin must start setting
/// the default orientation manually.
///
/// See https://developer.android.com/reference/androidx/camera/core/ImageCapture#setTargetRotation(int)
/// for an example on how setting target rotations for [UseCase]s works.
bool shouldSetDefaultRotation = false;
/// Error code indicating that an exposure offset value failed to be set.
static const String setExposureOffsetFailedErrorCode =
'setExposureOffsetFailed';
/// The currently set [FocusMeteringAction] used to enable auto-focus and
/// auto-exposure.
@visibleForTesting
FocusMeteringAction? currentFocusMeteringAction;
/// Current focus mode set via [setFocusMode].
///
/// CameraX defaults to auto focus mode.
FocusMode _currentFocusMode = FocusMode.auto;
/// Current exposure mode set via [setExposureMode].
///
/// CameraX defaults to auto exposure mode.
ExposureMode _currentExposureMode = ExposureMode.auto;
/// Whether or not a default focus point of the entire sensor area was focused
/// and locked.
///
/// This should only be true if [setExposureMode] was called to set
/// [FocusMode.locked] and no previous focus point was set via
/// [setFocusPoint].
bool _defaultFocusPointLocked = false;
/// Error code indicating that exposure compensation is not supported by
/// CameraX for the device.
static const String exposureCompensationNotSupported =
'exposureCompensationNotSupported';
/// Returns list of all available cameras and their descriptions.
@override
Future<List<CameraDescription>> availableCameras() async {
final List<CameraDescription> cameraDescriptions = <CameraDescription>[];
processCameraProvider ??= await proxy.getProcessCameraProvider();
final List<CameraInfo> cameraInfos =
await processCameraProvider!.getAvailableCameraInfos();
CameraLensDirection? cameraLensDirection;
int cameraCount = 0;
int? cameraSensorOrientation;
String? cameraName;
for (final CameraInfo cameraInfo in cameraInfos) {
// Determine the lens direction by filtering the CameraInfo
// TODO(gmackall): replace this with call to CameraInfo.getLensFacing when changes containing that method are available
if ((await proxy
.createCameraSelector(CameraSelector.lensFacingBack)
.filter(<CameraInfo>[cameraInfo]))
.isNotEmpty) {
cameraLensDirection = CameraLensDirection.back;
} else if ((await proxy
.createCameraSelector(CameraSelector.lensFacingFront)
.filter(<CameraInfo>[cameraInfo]))
.isNotEmpty) {
cameraLensDirection = CameraLensDirection.front;
} else {
//Skip this CameraInfo as its lens direction is unknown
continue;
}
cameraSensorOrientation = await cameraInfo.getSensorRotationDegrees();
cameraName = 'Camera $cameraCount';
cameraCount++;
cameraDescriptions.add(CameraDescription(
name: cameraName,
lensDirection: cameraLensDirection,
sensorOrientation: cameraSensorOrientation));
}
return cameraDescriptions;
}
  /// Creates an uninitialized camera instance and returns the camera ID.
  ///
  /// In the CameraX library, cameras are accessed by combining [UseCase]s
  /// to an instance of a [ProcessCameraProvider]. Thus, to create an
  /// uninitialized camera instance, this method retrieves a
  /// [ProcessCameraProvider] instance.
  ///
  /// The specified [resolutionPreset] is the target resolution that CameraX
  /// will attempt to select for the [UseCase]s constructed in this method
  /// ([preview], [imageCapture], [imageAnalysis], [videoCapture]). If
  /// unavailable, a fallback behavior of targeting the next highest resolution
  /// will be attempted. See https://developer.android.com/media/camera/camerax/configuration#specify-resolution.
  ///
  /// To return the camera ID, which is equivalent to the ID of the surface texture
  /// that a camera preview can be drawn to, a [Preview] instance is configured
  /// and bound to the [ProcessCameraProvider] instance.
  @override
  Future<int> createCamera(
    CameraDescription cameraDescription,
    ResolutionPreset? resolutionPreset, {
    bool enableAudio = false,
  }) async {
    // Must obtain proper permissions before attempting to access a camera.
    await proxy.requestCameraPermissions(enableAudio);
    // Save CameraSelector that matches cameraDescription.
    final int cameraSelectorLensDirection =
        _getCameraSelectorLensDirection(cameraDescription.lensDirection);
    final bool cameraIsFrontFacing =
        cameraSelectorLensDirection == CameraSelector.lensFacingFront;
    cameraSelector = proxy.createCameraSelector(cameraSelectorLensDirection);
    // Start listening for device orientation changes preceding camera creation.
    proxy.startListeningForDeviceOrientationChange(
        cameraIsFrontFacing, cameraDescription.sensorOrientation);
    // Determine ResolutionSelector and QualitySelector based on
    // resolutionPreset for camera UseCases.
    final ResolutionSelector? presetResolutionSelector =
        _getResolutionSelectorFromPreset(resolutionPreset);
    final QualitySelector? presetQualitySelector =
        _getQualitySelectorFromPreset(resolutionPreset);
    // Retrieve a fresh ProcessCameraProvider instance.
    processCameraProvider ??= await proxy.getProcessCameraProvider();
    // Unbind any UseCases left over from a previously created camera so the
    // configuration below starts from a clean state.
    processCameraProvider!.unbindAll();
    // Configure Preview instance.
    preview = proxy.createPreview(presetResolutionSelector,
        /* use CameraX default target rotation */ null);
    // The Flutter surface texture ID doubles as the camera ID that this method
    // returns to the caller.
    final int flutterSurfaceTextureId =
        await proxy.setPreviewSurfaceProvider(preview!);
    // Configure ImageCapture instance.
    imageCapture = proxy.createImageCapture(presetResolutionSelector,
        /* use CameraX default target rotation */ null);
    // Configure ImageAnalysis instance.
    // Defaults to YUV_420_888 image format.
    imageAnalysis = proxy.createImageAnalysis(presetResolutionSelector,
        /* use CameraX default target rotation */ null);
    // Configure VideoCapture and Recorder instances.
    recorder = proxy.createRecorder(presetQualitySelector);
    videoCapture = await proxy.createVideoCapture(recorder!);
    // Bind configured UseCases to ProcessCameraProvider instance & mark Preview
    // instance as bound but not paused. Video capture is bound at first use
    // instead of here.
    camera = await processCameraProvider!.bindToLifecycle(
        cameraSelector!, <UseCase>[preview!, imageCapture!, imageAnalysis!]);
    await _updateCameraInfoAndLiveCameraState(flutterSurfaceTextureId);
    previewInitiallyBound = true;
    _previewIsPaused = false;
    return flutterSurfaceTextureId;
  }
/// Initializes the camera on the device.
///
/// Since initialization of a camera does not directly map as an operation to
/// the CameraX library, this method just retrieves information about the
/// camera and sends a [CameraInitializedEvent].
///
/// [imageFormatGroup] is used to specify the image format used for image
/// streaming, but CameraX currently only supports YUV_420_888 (supported by
/// Flutter) and RGBA (not supported by Flutter). CameraX uses YUV_420_888
/// by default, so [imageFormatGroup] is not used.
@override
Future<void> initializeCamera(
int cameraId, {
ImageFormatGroup imageFormatGroup = ImageFormatGroup.unknown,
}) async {
// Configure CameraInitializedEvent to send as representation of a
// configured camera:
// Retrieve preview resolution.
if (preview == null) {
// No camera has been created; createCamera must be called before initializeCamera.
throw CameraException(
'cameraNotFound',
"Camera not found. Please call the 'create' method before calling 'initialize'",
);
}
final ResolutionInfo previewResolutionInfo =
await preview!.getResolutionInfo();
// Mark auto-focus, auto-exposure and setting points for focus & exposure
// as available operations as CameraX does its best across devices to
// support these by default.
const ExposureMode exposureMode = ExposureMode.auto;
const FocusMode focusMode = FocusMode.auto;
const bool exposurePointSupported = true;
const bool focusPointSupported = true;
cameraEventStreamController.add(CameraInitializedEvent(
cameraId,
previewResolutionInfo.width.toDouble(),
previewResolutionInfo.height.toDouble(),
exposureMode,
exposurePointSupported,
focusMode,
focusPointSupported));
}
  /// Releases the resources of the accessed camera.
  ///
  /// [cameraId] not used.
  @override
  Future<void> dispose(int cameraId) async {
    // Release the Flutter surface texture backing the preview.
    preview?.releaseFlutterSurfaceTexture();
    // Stop observing camera state changes for the disposed camera.
    await liveCameraState?.removeObservers();
    // Unbind every UseCase from the camera lifecycle.
    processCameraProvider?.unbindAll();
    // Stop any image analysis that may still be streaming frames.
    await imageAnalysis?.clearAnalyzer();
  }
/// The camera has been initialized.
@override
Stream<CameraInitializedEvent> onCameraInitialized(int cameraId) {
return _cameraEvents(cameraId).whereType<CameraInitializedEvent>();
}
/// The camera's resolution has changed.
///
/// This stream currently has no events being added to it from this plugin.
@override
Stream<CameraResolutionChangedEvent> onCameraResolutionChanged(int cameraId) {
return _cameraEvents(cameraId).whereType<CameraResolutionChangedEvent>();
}
/// The camera started to close.
@override
Stream<CameraClosingEvent> onCameraClosing(int cameraId) {
return _cameraEvents(cameraId).whereType<CameraClosingEvent>();
}
/// The camera experienced an error.
@override
Stream<CameraErrorEvent> onCameraError(int cameraId) {
return StreamGroup.mergeBroadcast<
CameraErrorEvent>(<Stream<CameraErrorEvent>>[
SystemServices.cameraErrorStreamController.stream
.map<CameraErrorEvent>((String errorDescription) {
return CameraErrorEvent(cameraId, errorDescription);
}),
_cameraEvents(cameraId).whereType<CameraErrorEvent>()
]);
}
/// The camera finished recording a video.
@override
Stream<VideoRecordedEvent> onVideoRecordedEvent(int cameraId) {
return _cameraEvents(cameraId).whereType<VideoRecordedEvent>();
}
/// Locks the capture orientation.
@override
Future<void> lockCaptureOrientation(
int cameraId,
DeviceOrientation orientation,
) async {
// Flag that (1) default rotation for UseCases will need to be set manually
// if orientation is ever unlocked and (2) the capture orientation is locked
// and should not be changed until unlocked.
shouldSetDefaultRotation = true;
captureOrientationLocked = true;
// Get target rotation based on locked orientation.
final int targetLockedRotation =
_getRotationConstantFromDeviceOrientation(orientation);
// Update UseCases to use target device orientation.
await imageCapture!.setTargetRotation(targetLockedRotation);
await imageAnalysis!.setTargetRotation(targetLockedRotation);
await videoCapture!.setTargetRotation(targetLockedRotation);
}
  /// Unlocks the capture orientation.
  @override
  Future<void> unlockCaptureOrientation(int cameraId) async {
    // Flag that default rotation should be set for UseCases as needed.
    // Note: shouldSetDefaultRotation intentionally stays true once an
    // orientation has ever been locked, since CameraX provides no way to unset
    // a target rotation; see the shouldSetDefaultRotation field documentation.
    captureOrientationLocked = false;
  }
/// Sets the exposure point for automatically determining the exposure values.
///
/// Supplying `null` for the [point] argument will result in resetting to the
/// original exposure point value.
///
/// Supplied non-null point must be mapped to the entire un-altered preview
/// surface for the exposure point to be applied accurately.
///
/// [cameraId] is not used.
@override
Future<void> setExposurePoint(int cameraId, Point<double>? point) async {
// We lock the new focus and metering action if focus mode has been locked
// to ensure that the current focus point remains locked. Any exposure mode
// setting will not be impacted by this lock (setting an exposure mode
// is implemented with Camera2 interop that will override settings to
// achieve the expected exposure mode as needed).
await _startFocusAndMeteringForPoint(
point: point,
meteringMode: FocusMeteringAction.flagAe,
disableAutoCancel: _currentFocusMode == FocusMode.locked);
}
/// Gets the minimum supported exposure offset for the selected camera in EV units.
///
/// [cameraId] not used.
@override
Future<double> getMinExposureOffset(int cameraId) async {
final ExposureState exposureState = await cameraInfo!.getExposureState();
return exposureState.exposureCompensationRange.minCompensation *
exposureState.exposureCompensationStep;
}
/// Gets the maximum supported exposure offset for the selected camera in EV units.
///
/// [cameraId] not used.
@override
Future<double> getMaxExposureOffset(int cameraId) async {
final ExposureState exposureState = await cameraInfo!.getExposureState();
return exposureState.exposureCompensationRange.maxCompensation *
exposureState.exposureCompensationStep;
}
  /// Sets the focus mode for taking pictures.
  ///
  /// Setting [FocusMode.locked] will lock the current focus point if one exists
  /// or the center of entire sensor area if not, and will stay locked until
  /// either:
  /// * Another focus point is set via [setFocusPoint] (which will then become
  ///   the locked focus point), or
  /// * Locked focus mode is unset by setting [FocusMode.auto].
  @override
  Future<void> setFocusMode(int cameraId, FocusMode mode) async {
    if (_currentFocusMode == mode) {
      // Desired focus mode is already set.
      return;
    }
    MeteringPoint? autoFocusPoint;
    bool? disableAutoCancel;
    switch (mode) {
      case FocusMode.auto:
        // Determine auto-focus point to restore, if any. We do not restore
        // default auto-focus point if set previously to lock focus.
        // NOTE(review): assumes currentFocusMeteringAction is non-null and
        // contains at least one AF point whenever _defaultFocusPointLocked is
        // false; otherwise the `!` or `.first` below will throw — confirm
        // callers guarantee this.
        final MeteringPoint? unLockedFocusPoint = _defaultFocusPointLocked
            ? null
            : currentFocusMeteringAction!.meteringPointInfos
                .where(((MeteringPoint, int?) meteringPointInfo) =>
                    meteringPointInfo.$2 == FocusMeteringAction.flagAf)
                .toList()
                .first
                .$1;
        _defaultFocusPointLocked = false;
        autoFocusPoint = unLockedFocusPoint;
        disableAutoCancel = false;
      case FocusMode.locked:
        MeteringPoint? lockedFocusPoint;
        // Determine if there is an auto-focus point set currently to lock.
        if (currentFocusMeteringAction != null) {
          final List<(MeteringPoint, int?)> possibleCurrentAfPoints =
              currentFocusMeteringAction!.meteringPointInfos
                  .where(((MeteringPoint, int?) meteringPointInfo) =>
                      meteringPointInfo.$2 == FocusMeteringAction.flagAf)
                  .toList();
          lockedFocusPoint = possibleCurrentAfPoints.isEmpty
              ? null
              : possibleCurrentAfPoints.first.$1;
        }
        // If there isn't, lock center of entire sensor area by default.
        if (lockedFocusPoint == null) {
          lockedFocusPoint =
              proxy.createMeteringPoint(0.5, 0.5, 1, cameraInfo!);
          _defaultFocusPointLocked = true;
        }
        autoFocusPoint = lockedFocusPoint;
        disableAutoCancel = true;
    }
    // Start appropriate focus and metering action.
    final bool focusAndMeteringWasSuccessful = await _startFocusAndMeteringFor(
        meteringPoint: autoFocusPoint,
        meteringMode: FocusMeteringAction.flagAf,
        disableAutoCancel: disableAutoCancel);
    if (!focusAndMeteringWasSuccessful) {
      // Do not update current focus mode.
      return;
    }
    // Update current focus mode.
    _currentFocusMode = mode;
    // If focus mode was just locked and exposure mode is not, set auto exposure
    // mode to ensure that disabling auto-cancel does not interfere with
    // automatic exposure metering.
    if (_currentExposureMode == ExposureMode.auto &&
        _currentFocusMode == FocusMode.locked) {
      await setExposureMode(cameraId, _currentExposureMode);
    }
  }
/// Gets the supported step size for exposure offset for the selected camera in EV units.
///
/// Returns -1 if exposure compensation is not supported for the device.
///
/// [cameraId] not used.
@override
Future<double> getExposureOffsetStepSize(int cameraId) async {
final ExposureState exposureState = await cameraInfo!.getExposureState();
final double exposureOffsetStepSize =
exposureState.exposureCompensationStep;
if (exposureOffsetStepSize == 0) {
// CameraX returns a step size of 0 if exposure compensation is not
// supported for the device.
return -1;
}
return exposureOffsetStepSize;
}
  /// Sets the exposure offset for the selected camera.
  ///
  /// The supplied [offset] value should be in EV units. 1 EV unit represents a
  /// doubling in brightness. It should be between the minimum and maximum offsets
  /// obtained through `getMinExposureOffset` and `getMaxExposureOffset` respectively.
  /// Throws a `CameraException` when trying to set exposure offset on a device
  /// that doesn't support exposure compensation or if setting the offset fails,
  /// like in the case that an illegal offset is supplied.
  ///
  /// When the supplied [offset] value does not align with the step size obtained
  /// through `getExposureOffsetStepSize`, it will automatically be rounded to
  /// the nearest step.
  ///
  /// Returns the (rounded) offset value that was set.
  @override
  Future<double> setExposureOffset(int cameraId, double offset) async {
    final double exposureOffsetStepSize =
        (await cameraInfo!.getExposureState()).exposureCompensationStep;
    // A step size of 0 indicates that exposure compensation is unsupported
    // for this device.
    if (exposureOffsetStepSize == 0) {
      throw CameraException(exposureCompensationNotSupported,
          'Exposure compensation not supported');
    }
    // (Exposure compensation index) * (exposure offset step size) =
    // (exposure offset).
    final int roundedExposureCompensationIndex =
        (offset / exposureOffsetStepSize).round();
    try {
      final int? newIndex = await cameraControl
          .setExposureCompensationIndex(roundedExposureCompensationIndex);
      // A null index means the operation was canceled before completing.
      if (newIndex == null) {
        throw CameraException(setExposureOffsetFailedErrorCode,
            'Setting exposure compensation index was canceled due to the camera being closed or a new request being submitted.');
      }
      return newIndex.toDouble();
    } on PlatformException catch (e) {
      throw CameraException(
          setExposureOffsetFailedErrorCode,
          e.message ??
              'Setting the camera exposure compensation index failed.');
    }
  }
/// Sets the focus point for automatically determining the focus values.
///
/// Supplying `null` for the [point] argument will result in resetting to the
/// original focus point value.
///
/// Supplied non-null point must be mapped to the entire un-altered preview
/// surface for the focus point to be applied accurately.
///
/// [cameraId] is not used.
@override
Future<void> setFocusPoint(int cameraId, Point<double>? point) async {
// We lock the new focus and metering action if focus mode has been locked
// to ensure that the current focus point remains locked. Any exposure mode
// setting will not be impacted by this lock (setting an exposure mode
// is implemented with Camera2 interop that will override settings to
// achieve the expected exposure mode as needed).
await _startFocusAndMeteringForPoint(
point: point,
meteringMode: FocusMeteringAction.flagAf,
disableAutoCancel: _currentFocusMode == FocusMode.locked);
}
/// Sets the exposure mode for taking pictures.
///
/// Setting [ExposureMode.locked] will lock current exposure point until it
/// is unset by setting [ExposureMode.auto].
///
/// [cameraId] is not used.
@override
Future<void> setExposureMode(int cameraId, ExposureMode mode) async {
final Camera2CameraControl camera2Control =
proxy.getCamera2CameraControl(cameraControl);
final bool lockExposureMode = mode == ExposureMode.locked;
final CaptureRequestOptions captureRequestOptions = proxy
.createCaptureRequestOptions(<(
CaptureRequestKeySupportedType,
Object?
)>[(CaptureRequestKeySupportedType.controlAeLock, lockExposureMode)]);
await camera2Control.addCaptureRequestOptions(captureRequestOptions);
_currentExposureMode = mode;
}
/// Gets the maximum supported zoom level for the selected camera.
///
/// [cameraId] not used.
@override
Future<double> getMaxZoomLevel(int cameraId) async {
final LiveData<ZoomState> liveZoomState = await cameraInfo!.getZoomState();
final ZoomState? zoomState = await liveZoomState.getValue();
if (zoomState == null) {
throw CameraException(
zoomStateNotSetErrorCode,
'No explicit ZoomState has been set on the LiveData instance for the camera in use.',
);
}
return zoomState.maxZoomRatio;
}
/// Gets the minimum supported zoom level for the selected camera.
///
/// [cameraId] not used.
@override
Future<double> getMinZoomLevel(int cameraId) async {
final LiveData<ZoomState> liveZoomState = await cameraInfo!.getZoomState();
final ZoomState? zoomState = await liveZoomState.getValue();
if (zoomState == null) {
throw CameraException(
zoomStateNotSetErrorCode,
'No explicit ZoomState has been set on the LiveData instance for the camera in use.',
);
}
return zoomState.minZoomRatio;
}
  /// Set the zoom level for the selected camera.
  ///
  /// The supplied [zoom] value should be between the minimum and the maximum
  /// supported zoom level returned by [getMinZoomLevel] and [getMaxZoomLevel].
  /// Throws a `CameraException` when an illegal zoom level is supplied.
  @override
  Future<void> setZoomLevel(int cameraId, double zoom) async {
    // The ratio is applied via the CameraControl of the currently bound camera.
    await cameraControl.setZoomRatio(zoom);
  }
/// The ui orientation changed.
@override
Stream<DeviceOrientationChangedEvent> onDeviceOrientationChanged() {
return DeviceOrientationManager
.deviceOrientationChangedStreamController.stream;
}
  /// Pause the active preview on the current frame for the selected camera.
  ///
  /// [cameraId] not used.
  @override
  Future<void> pausePreview(int cameraId) async {
    // Mark the preview as paused before unbinding so that a rebind attempt
    // (see _bindPreviewToLifecycle) is skipped while paused.
    _previewIsPaused = true;
    await _unbindUseCaseFromLifecycle(preview!);
  }
  /// Resume the paused preview for the selected camera.
  ///
  /// [cameraId] not used.
  @override
  Future<void> resumePreview(int cameraId) async {
    // Clear the paused flag first so _bindPreviewToLifecycle will rebind the
    // preview UseCase.
    _previewIsPaused = false;
    await _bindPreviewToLifecycle(cameraId);
  }
/// Returns a widget showing a live camera preview.
///
/// [createCamera] must be called before attempting to build this preview.
@override
Widget buildPreview(int cameraId) {
if (!previewInitiallyBound) {
// No camera has been created, and thus, the preview UseCase has not been
// bound to the camera lifecycle, restricting this preview from being
// built.
throw CameraException(
'cameraNotFound',
"Camera not found. Please call the 'create' method before calling 'buildPreview'",
);
}
return Texture(textureId: cameraId);
}
  /// Captures an image and returns the file where it was saved.
  ///
  /// [cameraId] is not used.
  @override
  Future<XFile> takePicture(int cameraId) async {
    // Set flash mode.
    if (_currentFlashMode != null) {
      await imageCapture!.setFlashMode(_currentFlashMode!);
    } else if (torchEnabled) {
      // Ensure any previously set flash modes are unset when torch mode has
      // been enabled.
      await imageCapture!.setFlashMode(ImageCapture.flashModeOff);
    }
    // Set target rotation to default CameraX rotation only if capture
    // orientation not locked.
    if (!captureOrientationLocked && shouldSetDefaultRotation) {
      await imageCapture!
          .setTargetRotation(await proxy.getDefaultDisplayRotation());
    }
    // Take the picture and wrap the resulting file path for the caller.
    final String picturePath = await imageCapture!.takePicture();
    return XFile(picturePath);
  }
  /// Sets the flash mode for the selected camera.
  ///
  /// When the [FlashMode.torch] is enabled, any previously set [FlashMode] with
  /// this method will be disabled, just as with any other [FlashMode]; while
  /// this is not default native Android behavior as defined by the CameraX API,
  /// this behavior is compliant with the plugin platform interface.
  ///
  /// This method combines the notion of setting the flash mode of the
  /// [imageCapture] UseCase and enabling the camera torch, as described
  /// by https://developer.android.com/reference/androidx/camera/core/ImageCapture
  /// and https://developer.android.com/reference/androidx/camera/core/CameraControl#enableTorch(boolean),
  /// respectively.
  @override
  Future<void> setFlashMode(int cameraId, FlashMode mode) async {
    // Turn off torch mode if it is enabled and not being redundantly set.
    if (mode != FlashMode.torch && torchEnabled) {
      await cameraControl.enableTorch(false);
      torchEnabled = false;
    }
    // Record the ImageCapture flash mode to apply on the next takePicture
    // call; torch mode is applied immediately via CameraControl instead.
    switch (mode) {
      case FlashMode.off:
        _currentFlashMode = ImageCapture.flashModeOff;
      case FlashMode.auto:
        _currentFlashMode = ImageCapture.flashModeAuto;
      case FlashMode.always:
        _currentFlashMode = ImageCapture.flashModeOn;
      case FlashMode.torch:
        // A null flash mode signals takePicture to turn the ImageCapture
        // flash off while the torch is on.
        _currentFlashMode = null;
        if (torchEnabled) {
          // Torch mode enabled already.
          return;
        }
        await cameraControl.enableTorch(true);
        torchEnabled = true;
    }
  }
/// Configures and starts a video recording. Returns silently without doing
/// anything if there is currently an active recording.
///
/// Note that the preset resolution is used to configure the recording, but
/// 240p ([ResolutionPreset.low]) is unsupported and will fallback to
/// configure the recording as the next highest available quality.
///
/// This method is deprecated in favour of [startVideoCapturing].
@override
Future<void> startVideoRecording(int cameraId,
{Duration? maxVideoDuration}) async {
return startVideoCapturing(
VideoCaptureOptions(cameraId, maxDuration: maxVideoDuration));
}
  /// Starts a video recording and/or streaming session.
  ///
  /// Please see [VideoCaptureOptions] for documentation on the
  /// configuration options. Currently, maxVideoDuration and streamOptions
  /// are unsupported due to the limitations of CameraX and the platform
  /// interface, respectively.
  @override
  Future<void> startVideoCapturing(VideoCaptureOptions options) async {
    if (recording != null) {
      // There is currently an active recording, so do not start a new one.
      return;
    }
    // Bind the video capture UseCase lazily on first use.
    if (!(await processCameraProvider!.isBound(videoCapture!))) {
      camera = await processCameraProvider!
          .bindToLifecycle(cameraSelector!, <UseCase>[videoCapture!]);
      await _updateCameraInfoAndLiveCameraState(options.cameraId);
    }
    // Set target rotation to default CameraX rotation only if capture
    // orientation not locked.
    if (!captureOrientationLocked && shouldSetDefaultRotation) {
      await videoCapture!
          .setTargetRotation(await proxy.getDefaultDisplayRotation());
    }
    // Record to a temporary file named with the configured video prefix.
    videoOutputPath =
        await SystemServices.getTempFilePath(videoPrefix, '.temp');
    pendingRecording = await recorder!.prepareRecording(videoOutputPath!);
    recording = await pendingRecording!.start();
    // Optionally start streaming frames to the provided callback.
    if (options.streamCallback != null) {
      onStreamedFrameAvailable(options.cameraId).listen(options.streamCallback);
    }
  }
/// Stops the video recording and returns the file where it was saved.
/// Throws a CameraException if the recording is currently null, or if the
/// videoOutputPath is null.
///
/// If the videoOutputPath is null the recording objects are cleaned up
/// so starting a new recording is possible.
@override
Future<XFile> stopVideoRecording(int cameraId) async {
if (recording == null) {
throw CameraException(
'videoRecordingFailed',
'Attempting to stop a '
'video recording while no recording is in progress.');
}
if (videoOutputPath == null) {
// Stop the current active recording as we will be unable to complete it
// in this error case.
await recording!.close();
recording = null;
pendingRecording = null;
throw CameraException(
'INVALID_PATH',
'The platform did not return a path '
'while reporting success. The platform should always '
'return a valid path or report an error.');
}
await recording!.close();
recording = null;
pendingRecording = null;
return XFile(videoOutputPath!);
}
/// Pause the current video recording if it is not null.
@override
Future<void> pauseVideoRecording(int cameraId) async {
if (recording != null) {
await recording!.pause();
}
}
/// Resume the current video recording if it is not null.
@override
Future<void> resumeVideoRecording(int cameraId) async {
if (recording != null) {
await recording!.resume();
}
}
/// A new streamed frame is available.
///
/// Listening to this stream will start streaming, and canceling will stop.
/// To temporarily stop receiving frames, cancel, then listen again later.
/// Pausing/resuming is not supported, as pausing the stream would cause
/// very high memory usage, and will throw an exception due to the
/// implementation using a broadcast [StreamController], which does not
/// support those operations.
///
/// [cameraId] and [options] are not used.
@override
Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
{CameraImageStreamOptions? options}) {
cameraImageDataStreamController = StreamController<CameraImageData>(
onListen: () => _configureImageAnalysis(cameraId),
onCancel: _onFrameStreamCancel,
);
return cameraImageDataStreamController!.stream;
}
  // Methods for binding UseCases to the lifecycle of the camera controlled
  // by a ProcessCameraProvider instance:

  /// Binds [preview] instance to the camera lifecycle controlled by the
  /// [processCameraProvider].
  ///
  /// No-op if the preview is already bound or has been intentionally paused
  /// (rebinding a paused preview would resume it).
  ///
  /// [cameraId] used to build [CameraEvent]s should you wish to filter
  /// these based on a reference to a cameraId received from calling
  /// `createCamera(...)`.
  Future<void> _bindPreviewToLifecycle(int cameraId) async {
    final bool previewIsBound = await processCameraProvider!.isBound(preview!);
    if (previewIsBound || _previewIsPaused) {
      // Only bind if preview is not already bound or intentionally paused.
      return;
    }

    // Binding yields the Camera instance; refresh the cached camera info,
    // control, and state observers to match it.
    camera = await processCameraProvider!
        .bindToLifecycle(cameraSelector!, <UseCase>[preview!]);
    await _updateCameraInfoAndLiveCameraState(cameraId);
  }
  /// Configures the [imageAnalysis] instance for image streaming.
  ///
  /// Sets the target rotation (unless capture orientation is locked) and
  /// installs an [Analyzer] that converts each incoming [ImageProxy] into a
  /// [CameraImageData] and adds it to [cameraImageDataStreamController].
  Future<void> _configureImageAnalysis(int cameraId) async {
    // Set target rotation to default CameraX rotation only if capture
    // orientation not locked.
    if (!captureOrientationLocked && shouldSetDefaultRotation) {
      await imageAnalysis!
          .setTargetRotation(await proxy.getDefaultDisplayRotation());
    }

    // Create and set Analyzer that can read image data for image streaming.
    // A weak reference is used so that the analyzer closure does not keep
    // this plugin instance alive.
    final WeakReference<AndroidCameraCameraX> weakThis =
        WeakReference<AndroidCameraCameraX>(this);
    Future<void> analyze(ImageProxy imageProxy) async {
      // Copy out every image plane (bytes plus row/pixel strides).
      final List<PlaneProxy> planes = await imageProxy.getPlanes();
      final List<CameraImagePlane> cameraImagePlanes = <CameraImagePlane>[];
      for (final PlaneProxy plane in planes) {
        cameraImagePlanes.add(CameraImagePlane(
            bytes: plane.buffer,
            bytesPerRow: plane.rowStride,
            bytesPerPixel: plane.pixelStride));
      }
      final int format = imageProxy.format;
      final CameraImageFormat cameraImageFormat = CameraImageFormat(
          _imageFormatGroupFromPlatformData(format),
          raw: format);
      final CameraImageData cameraImageData = CameraImageData(
          format: cameraImageFormat,
          planes: cameraImagePlanes,
          height: imageProxy.height,
          width: imageProxy.width);
      // NOTE(review): `weakThis.target!` assumes the plugin instance is still
      // alive whenever a frame arrives — confirm frames cannot be delivered
      // after collection.
      weakThis.target!.cameraImageDataStreamController!.add(cameraImageData);
      // Close the proxy once the data has been copied out.
      await imageProxy.close();
    }

    final Analyzer analyzer = proxy.createAnalyzer(analyze);
    await imageAnalysis!.setAnalyzer(analyzer);
  }
  /// Unbinds [useCase] from camera lifecycle controlled by the
  /// [processCameraProvider].
  ///
  /// No-op if [useCase] is not currently bound.
  Future<void> _unbindUseCaseFromLifecycle(UseCase useCase) async {
    final bool useCaseIsBound = await processCameraProvider!.isBound(useCase);
    if (!useCaseIsBound) {
      return;
    }

    // NOTE(review): the result of `unbind` is not awaited — confirm that
    // unbinding is intentionally fire-and-forget and callers need not wait
    // for completion.
    processCameraProvider!.unbind(<UseCase>[useCase]);
  }
  // Methods for configuring image streaming:

  /// The [onCancel] callback for the stream controller used for image
  /// streaming.
  ///
  /// Removes the previously set analyzer on the [imageAnalysis] instance, since
  /// image information should no longer be streamed. The [imageAnalysis] use
  /// case itself is left as-is; only its analyzer is cleared.
  FutureOr<void> _onFrameStreamCancel() async {
    await imageAnalysis!.clearAnalyzer();
  }
/// Converts between Android ImageFormat constants and [ImageFormatGroup]s.
///
/// See https://developer.android.com/reference/android/graphics/ImageFormat.
ImageFormatGroup _imageFormatGroupFromPlatformData(dynamic data) {
switch (data) {
case imageFormatYuv420_888: // android.graphics.ImageFormat.YUV_420_888
return ImageFormatGroup.yuv420;
case imageFormatJpeg: // android.graphics.ImageFormat.JPEG
return ImageFormatGroup.jpeg;
}
return ImageFormatGroup.unknown;
}
  // Methods concerning camera state:

  /// Updates [cameraInfo] and [cameraControl] to the information corresponding
  /// to [camera] and adds observers to the [LiveData] of the [CameraState] of
  /// the current [camera], saved as [liveCameraState].
  ///
  /// If a previous [liveCameraState] was stored, existing observers are
  /// removed, as well.
  Future<void> _updateCameraInfoAndLiveCameraState(int cameraId) async {
    cameraInfo = await camera!.getCameraInfo();
    cameraControl = await camera!.getCameraControl();
    // Detach observers from the previous camera's state before replacing it
    // so stale cameras no longer report state changes.
    await liveCameraState?.removeObservers();
    liveCameraState = await cameraInfo!.getCameraState();
    await liveCameraState!.observe(_createCameraClosingObserver(cameraId));
  }
  /// Creates [Observer] of the [CameraState] that will:
  ///
  /// * Send a [CameraClosingEvent] if the [CameraState] indicates that the
  ///   camera has begun to close.
  /// * Send a [CameraErrorEvent] if the [CameraState] indicates that the
  ///   camera is in error state.
  Observer<CameraState> _createCameraClosingObserver(int cameraId) {
    // A weak reference is used so the observer callback does not keep this
    // plugin instance alive.
    final WeakReference<AndroidCameraCameraX> weakThis =
        WeakReference<AndroidCameraCameraX>(this);

    // Callback method used to implement the behavior described above:
    void onChanged(Object stateAsObject) {
      // This cast is safe because the Observer implementation ensures
      // the type of stateAsObject is the same as the observer this callback
      // is attached to.
      final CameraState state = stateAsObject as CameraState;
      if (state.type == CameraStateType.closing) {
        // NOTE(review): `target!` assumes the plugin instance is alive when
        // the state changes — confirm events cannot fire after collection.
        weakThis.target!.cameraEventStreamController
            .add(CameraClosingEvent(cameraId));
      }
      if (state.error != null) {
        weakThis.target!.cameraEventStreamController
            .add(CameraErrorEvent(cameraId, state.error!.getDescription()));
      }
    }

    return proxy.createCameraStateObserver(onChanged);
  }
// Methods for mapping Flutter camera constants to CameraX constants:
/// Returns [CameraSelector] lens direction that maps to specified
/// [CameraLensDirection].
int _getCameraSelectorLensDirection(CameraLensDirection lensDirection) {
switch (lensDirection) {
case CameraLensDirection.front:
return CameraSelector.lensFacingFront;
case CameraLensDirection.back:
return CameraSelector.lensFacingBack;
case CameraLensDirection.external:
return CameraSelector.lensFacingExternal;
}
}
/// Returns [Surface] constant for counter-clockwise degrees of rotation from
/// [DeviceOrientation.portraitUp] required to reach the specified
/// [DeviceOrientation].
int _getRotationConstantFromDeviceOrientation(DeviceOrientation orientation) {
switch (orientation) {
case DeviceOrientation.portraitUp:
return Surface.ROTATION_0;
case DeviceOrientation.landscapeLeft:
return Surface.ROTATION_90;
case DeviceOrientation.portraitDown:
return Surface.ROTATION_180;
case DeviceOrientation.landscapeRight:
return Surface.ROTATION_270;
}
}
  /// Returns the [ResolutionSelector] that maps to the specified resolution
  /// preset for camera [UseCase]s.
  ///
  /// If the specified [preset] is unavailable, the camera will fall back to the
  /// closest lower resolution available.
  ///
  /// Returns null for a null [preset] so that CameraX applies its own default
  /// per use case; [ResolutionPreset.max] selects the highest available
  /// resolution rather than a fixed bound size.
  ResolutionSelector? _getResolutionSelectorFromPreset(
      ResolutionPreset? preset) {
    // Prefer the closest lower resolution, then the closest higher one, when
    // the requested bound size is unsupported by the device.
    const int fallbackRule =
        ResolutionStrategy.fallbackRuleClosestLowerThenHigher;

    Size? boundSize;
    ResolutionStrategy? resolutionStrategy;
    switch (preset) {
      case ResolutionPreset.low:
        boundSize = const Size(320, 240);
      case ResolutionPreset.medium:
        boundSize = const Size(720, 480);
      case ResolutionPreset.high:
        boundSize = const Size(1280, 720);
      case ResolutionPreset.veryHigh:
        boundSize = const Size(1920, 1080);
      case ResolutionPreset.ultraHigh:
        boundSize = const Size(3840, 2160);
      case ResolutionPreset.max:
        // Automatically set strategy to choose highest available.
        resolutionStrategy =
            proxy.createResolutionStrategy(highestAvailable: true);
        return proxy.createResolutionSelector(resolutionStrategy);
      case null:
        // If no preset is specified, default to CameraX's default behavior
        // for each UseCase.
        return null;
    }

    resolutionStrategy = proxy.createResolutionStrategy(
        boundSize: boundSize, fallbackRule: fallbackRule);
    return proxy.createResolutionSelector(resolutionStrategy);
  }
  /// Returns the [QualitySelector] that maps to the specified resolution
  /// preset for the camera used only for video capture.
  ///
  /// If the specified [preset] is unavailable, the camera will fall back to the
  /// closest lower resolution available.
  ///
  /// Returns null for a null [preset] so that CameraX applies its own default
  /// behavior.
  QualitySelector? _getQualitySelectorFromPreset(ResolutionPreset? preset) {
    VideoQuality? videoQuality;
    switch (preset) {
      // Note: the empty `low` case intentionally falls through to `medium`,
      // mapping both presets to SD.
      case ResolutionPreset.low:
      // 240p is not supported by CameraX.
      case ResolutionPreset.medium:
        videoQuality = VideoQuality.SD;
      case ResolutionPreset.high:
        videoQuality = VideoQuality.HD;
      case ResolutionPreset.veryHigh:
        videoQuality = VideoQuality.FHD;
      case ResolutionPreset.ultraHigh:
        videoQuality = VideoQuality.UHD;
      case ResolutionPreset.max:
        videoQuality = VideoQuality.highest;
      case null:
        // If no preset is specified, default to CameraX's default behavior
        // for each UseCase.
        return null;
    }

    // We will choose the next highest video quality if the one desired
    // is unavailable.
    const VideoResolutionFallbackRule fallbackRule =
        VideoResolutionFallbackRule.lowerQualityOrHigherThan;
    final FallbackStrategy fallbackStrategy = proxy.createFallbackStrategy(
        quality: videoQuality, fallbackRule: fallbackRule);

    return proxy.createQualitySelector(
        videoQuality: videoQuality, fallbackStrategy: fallbackStrategy);
  }
// Methods for configuring auto-focus and auto-exposure:
Future<bool> _startFocusAndMeteringForPoint(
{required Point<double>? point,
required int meteringMode,
bool disableAutoCancel = false}) async {
return _startFocusAndMeteringFor(
meteringPoint: point == null
? null
: proxy.createMeteringPoint(
point.x, point.y, /* size */ null, cameraInfo!),
meteringMode: meteringMode,
disableAutoCancel: disableAutoCancel);
}
  /// Starts a focus and metering action and returns whether or not it was
  /// successful.
  ///
  /// This method will modify and start the current action's [MeteringPoint]s
  /// overriden with the [meteringPoint] provided for the specified
  /// [meteringMode] type only, with all other metering points of other modes
  /// left untouched. If no current action exists, only the specified
  /// [meteringPoint] will be set. Thus, the focus and metering action started
  /// will only contain at most the one most recently set metering point for
  /// each metering mode: AF, AE, AWB.
  ///
  /// Thus, if [meteringPoint] is non-null, this action includes:
  /// * metering points and their modes previously added to
  ///   [currentFocusMeteringAction] that do not share a metering mode with
  ///   [meteringPoint] (if [currentFocusMeteringAction] is non-null) and
  /// * [meteringPoint] with the specified [meteringMode].
  /// If [meteringPoint] is null and [currentFocusMeteringAction] is non-null,
  /// this action includes only metering points and their modes previously added
  /// to [currentFocusMeteringAction] that do not share a metering mode with
  /// [meteringPoint]. If [meteringPoint] and [currentFocusMeteringAction] are
  /// null, then focus and metering will be canceled.
  ///
  /// Throws a [CameraException] if a non-null [meteringPoint] lies outside the
  /// normalized (0,0)-(1,1) coordinate range.
  Future<bool> _startFocusAndMeteringFor(
      {required MeteringPoint? meteringPoint,
      required int meteringMode,
      bool disableAutoCancel = false}) async {
    if (meteringPoint == null) {
      // Try to clear any metering point from previous action with the specified
      // meteringMode.
      if (currentFocusMeteringAction == null) {
        // Attempting to clear a metering point from a previous action, but no
        // such action exists.
        return false;
      }

      // Remove metering point with specified meteringMode from current focus
      // and metering action, as only one focus or exposure point may be set
      // at once in this plugin.
      final List<(MeteringPoint, int?)> newMeteringPointInfos =
          currentFocusMeteringAction!.meteringPointInfos
              .where(((MeteringPoint, int?) meteringPointInfo) =>
                  // meteringPointInfo may technically include points without a
                  // mode specified, but this logic is safe because this plugin
                  // only uses points that explicitly have mode
                  // FocusMeteringAction.flagAe or FocusMeteringAction.flagAf.
                  meteringPointInfo.$2 != meteringMode)
              .toList();

      if (newMeteringPointInfos.isEmpty) {
        // If no other metering points were specified, cancel any previously
        // started focus and metering actions.
        await cameraControl.cancelFocusAndMetering();
        currentFocusMeteringAction = null;
        return true;
      }
      currentFocusMeteringAction = proxy.createFocusMeteringAction(
          newMeteringPointInfos, disableAutoCancel);
    } else if (meteringPoint.x < 0 ||
        meteringPoint.x > 1 ||
        meteringPoint.y < 0 ||
        meteringPoint.y > 1) {
      // Metering point coordinates must be normalized to [0, 1].
      throw CameraException('pointInvalid',
          'The coordinates of a metering point for an auto-focus or auto-exposure action must be within (0,0) and (1,1), but a point with coordinates (${meteringPoint.x}, ${meteringPoint.y}) was provided for metering mode $meteringMode.');
    } else {
      // Add new metering point with specified meteringMode, which may involve
      // replacing a metering point with the same specified meteringMode from
      // the current focus and metering action.
      List<(MeteringPoint, int?)> newMeteringPointInfos =
          <(MeteringPoint, int?)>[];

      if (currentFocusMeteringAction != null) {
        newMeteringPointInfos = currentFocusMeteringAction!.meteringPointInfos
            .where(((MeteringPoint, int?) meteringPointInfo) =>
                // meteringPointInfo may technically include points without a
                // mode specified, but this logic is safe because this plugin
                // only uses points that explicitly have mode
                // FocusMeteringAction.flagAe or FocusMeteringAction.flagAf.
                meteringPointInfo.$2 != meteringMode)
            .toList();
      }
      newMeteringPointInfos.add((meteringPoint, meteringMode));
      currentFocusMeteringAction = proxy.createFocusMeteringAction(
          newMeteringPointInfos, disableAutoCancel);
    }

    // Start the updated action and report whether focus completed
    // successfully (false when no result was returned).
    final FocusMeteringResult? result =
        await cameraControl.startFocusAndMetering(currentFocusMeteringAction!);
    return await result?.isFocusSuccessful() ?? false;
  }
}
| packages/packages/camera/camera_android_camerax/lib/src/android_camera_camerax.dart/0 | {
"file_path": "packages/packages/camera/camera_android_camerax/lib/src/android_camera_camerax.dart",
"repo_id": "packages",
"token_count": 16281
} | 976 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:flutter/services.dart' show BinaryMessenger;
import 'package:meta/meta.dart' show immutable;
import 'camerax_library.g.dart';
import 'instance_manager.dart';
import 'java_object.dart';
import 'metering_point.dart';
/// A configuration used to trigger a focus and/or metering action.
///
/// See https://developer.android.com/reference/androidx/camera/core/FocusMeteringAction.
@immutable
class FocusMeteringAction extends JavaObject {
  /// Creates a [FocusMeteringAction].
  ///
  /// Also creates the corresponding native object via the host API.
  FocusMeteringAction({
    BinaryMessenger? binaryMessenger,
    InstanceManager? instanceManager,
    required this.meteringPointInfos,
    this.disableAutoCancel,
  }) : super.detached(
          binaryMessenger: binaryMessenger,
          instanceManager: instanceManager,
        ) {
    _api = _FocusMeteringActionHostApiImpl(
        binaryMessenger: binaryMessenger, instanceManager: instanceManager);
    _api.createFromInstance(this, meteringPointInfos, disableAutoCancel);
  }

  /// Creates a [FocusMeteringAction] that is not automatically attached to a
  /// native object.
  ///
  /// Unlike the default constructor, no native-side create call is made.
  FocusMeteringAction.detached({
    BinaryMessenger? binaryMessenger,
    InstanceManager? instanceManager,
    required this.meteringPointInfos,
    this.disableAutoCancel,
  }) : super.detached(
          binaryMessenger: binaryMessenger,
          instanceManager: instanceManager,
        ) {
    _api = _FocusMeteringActionHostApiImpl(
        binaryMessenger: binaryMessenger, instanceManager: instanceManager);
  }

  // Host API used to communicate with the native FocusMeteringAction.
  late final _FocusMeteringActionHostApiImpl _api;

  /// The requested [MeteringPoint]s and modes that are relevant to each of those
  /// points.
  final List<(MeteringPoint meteringPoint, int? meteringMode)>
      meteringPointInfos;

  /// Disables the auto-cancel.
  ///
  /// By default (and if set to false), auto-cancel is enabled with 5 seconds
  /// duration.
  final bool? disableAutoCancel;

  /// Flag for metering mode that indicates the auto focus region is enabled.
  ///
  /// An autofocus scan is also triggered when [flagAf] is assigned.
  ///
  /// See https://developer.android.com/reference/androidx/camera/core/FocusMeteringAction#FLAG_AF().
  static const int flagAf = 1;

  /// Flag for metering mode that indicates the auto exposure region is enabled.
  ///
  /// See https://developer.android.com/reference/androidx/camera/core/FocusMeteringAction#FLAG_AE().
  static const int flagAe = 2;

  /// Flag for metering mode that indicates the auto white balance region is
  /// enabled.
  ///
  /// See https://developer.android.com/reference/androidx/camera/core/FocusMeteringAction#FLAG_AWB().
  static const int flagAwb = 4;
}
/// Host API implementation of [FocusMeteringAction].
class _FocusMeteringActionHostApiImpl extends FocusMeteringActionHostApi {
  /// Constructs a [_FocusMeteringActionHostApiImpl].
  ///
  /// If [binaryMessenger] is null, the default [BinaryMessenger] will be used,
  /// which routes to the host platform.
  ///
  /// An [instanceManager] is typically passed when a copy of an instance
  /// contained by an [InstanceManager] is being created. If left null, it
  /// will default to the global instance defined in [JavaObject].
  _FocusMeteringActionHostApiImpl(
      {this.binaryMessenger, InstanceManager? instanceManager}) {
    this.instanceManager = instanceManager ?? JavaObject.globalInstanceManager;
  }

  /// Receives binary data across the Flutter platform barrier.
  ///
  /// If it is null, the default [BinaryMessenger] will be used which routes to
  /// the host platform.
  final BinaryMessenger? binaryMessenger;

  /// Maintains instances stored to communicate with native language objects.
  late final InstanceManager instanceManager;

  /// Creates a [FocusMeteringAction] instance with the specified list of
  /// [MeteringPoint]s and their modes in order of descending priority.
  void createFromInstance(
      FocusMeteringAction instance,
      List<(MeteringPoint meteringPoint, int? meteringMode)> meteringPointInfos,
      bool? disableAutoCancel) {
    final int identifier = instanceManager.addDartCreatedInstance(instance,
        onCopy: (FocusMeteringAction original) {
      // Carry over the full configuration — including disableAutoCancel —
      // so that the detached copy is equivalent to the original instance.
      // (Previously disableAutoCancel was dropped by the copy.)
      return FocusMeteringAction.detached(
          binaryMessenger: binaryMessenger,
          instanceManager: instanceManager,
          meteringPointInfos: original.meteringPointInfos,
          disableAutoCancel: original.disableAutoCancel);
    });

    // Convert each (MeteringPoint, meteringMode) pair to the serializable
    // MeteringPointInfo form expected by the generated Pigeon API, replacing
    // each point with its InstanceManager identifier.
    final List<MeteringPointInfo> meteringPointInfosWithIds =
        <MeteringPointInfo>[];
    for (final (
          MeteringPoint meteringPoint,
          int? meteringMode
        ) meteringPointInfo in meteringPointInfos) {
      meteringPointInfosWithIds.add(MeteringPointInfo(
          meteringPointId: instanceManager.getIdentifier(meteringPointInfo.$1)!,
          meteringMode: meteringPointInfo.$2));
    }

    create(identifier, meteringPointInfosWithIds, disableAutoCancel);
  }
}
| packages/packages/camera/camera_android_camerax/lib/src/focus_metering_action.dart/0 | {
"file_path": "packages/packages/camera/camera_android_camerax/lib/src/focus_metering_action.dart",
"repo_id": "packages",
"token_count": 1562
} | 977 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:flutter/services.dart' show BinaryMessenger;
import 'package:meta/meta.dart' show immutable;
import 'android_camera_camerax_flutter_api_impls.dart';
import 'camerax_library.g.dart';
import 'instance_manager.dart';
import 'java_object.dart';
/// Wraps a CameraX recording class.
///
/// See https://developer.android.com/reference/androidx/camera/video/Recording.
@immutable
class Recording extends JavaObject {
  /// Constructs a [Recording] that is not automatically attached to a native
  /// object.
  Recording.detached(
      {BinaryMessenger? binaryMessenger, InstanceManager? instanceManager})
      : super.detached(
          binaryMessenger: binaryMessenger,
          instanceManager: instanceManager,
        ) {
    _api = RecordingHostApiImpl(
        binaryMessenger: binaryMessenger, instanceManager: instanceManager);
    AndroidCameraXCameraFlutterApis.instance.ensureSetUp();
  }

  // Host API used to forward calls to the native recording.
  late final RecordingHostApiImpl _api;

  /// Closes this recording.
  Future<void> close() => _api.closeFromInstance(this);

  /// Pauses this recording if active.
  Future<void> pause() => _api.pauseFromInstance(this);

  /// Resumes the current recording if paused.
  Future<void> resume() => _api.resumeFromInstance(this);

  /// Stops the recording, as if calling [close].
  Future<void> stop() => _api.stopFromInstance(this);
}
/// Host API implementation of [Recording].
class RecordingHostApiImpl extends RecordingHostApi {
  /// Creates a [RecordingHostApiImpl].
  RecordingHostApiImpl({this.binaryMessenger, InstanceManager? instanceManager})
      : super(binaryMessenger: binaryMessenger) {
    this.instanceManager = instanceManager ?? JavaObject.globalInstanceManager;
  }

  /// Receives binary data across the Flutter platform barrier.
  ///
  /// If it is null, the default BinaryMessenger will be used which routes to
  /// the host platform.
  final BinaryMessenger? binaryMessenger;

  /// Maintains instances stored to communicate with native language objects.
  late final InstanceManager instanceManager;

  // Looks up the identifier of [recording], which must already be stored in
  // the instanceManager.
  int _identifierOf(Recording recording) =>
      instanceManager.getIdentifier(recording)!;

  /// Closes the specified recording instance.
  Future<void> closeFromInstance(Recording recording) =>
      close(_identifierOf(recording));

  /// Pauses the specified recording instance if active.
  Future<void> pauseFromInstance(Recording recording) =>
      pause(_identifierOf(recording));

  /// Resumes the specified recording instance if paused.
  Future<void> resumeFromInstance(Recording recording) =>
      resume(_identifierOf(recording));

  /// Stops the specified recording instance, as if calling
  /// [closeFromInstance].
  Future<void> stopFromInstance(Recording recording) =>
      stop(_identifierOf(recording));
}
/// Flutter API implementation of [Recording].
class RecordingFlutterApiImpl extends RecordingFlutterApi {
  /// Constructs a [RecordingFlutterApiImpl].
  RecordingFlutterApiImpl({
    this.binaryMessenger,
    InstanceManager? instanceManager,
  }) : instanceManager = instanceManager ?? JavaObject.globalInstanceManager;

  /// Receives binary data across the Flutter platform barrier.
  ///
  /// If it is null, the default BinaryMessenger will be used which routes to
  /// the host platform.
  final BinaryMessenger? binaryMessenger;

  /// Maintains instances stored to communicate with native language objects.
  final InstanceManager instanceManager;

  /// Registers a detached [Recording] under [identifier] when the native side
  /// reports that a recording was created.
  @override
  void create(int identifier) {
    instanceManager.addHostCreatedInstance(
      Recording.detached(
        binaryMessenger: binaryMessenger,
        instanceManager: instanceManager,
      ),
      // onCopy produces another detached wrapper pointing at the same native
      // recording when the InstanceManager needs a strong copy.
      identifier, onCopy: (Recording original) {
      return Recording.detached(
        binaryMessenger: binaryMessenger,
        instanceManager: instanceManager,
      );
    });
  }
}
| packages/packages/camera/camera_android_camerax/lib/src/recording.dart/0 | {
"file_path": "packages/packages/camera/camera_android_camerax/lib/src/recording.dart",
"repo_id": "packages",
"token_count": 1202
} | 978 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:camera_android_camerax/src/camera2_camera_control.dart';
import 'package:camera_android_camerax/src/camera_control.dart';
import 'package:camera_android_camerax/src/capture_request_options.dart';
import 'package:camera_android_camerax/src/instance_manager.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:mockito/annotations.dart';
import 'package:mockito/mockito.dart';
import 'camera2_camera_control_test.mocks.dart';
import 'test_camerax_library.g.dart';
@GenerateMocks(<Type>[
  CameraControl,
  CaptureRequestOptions,
  TestCamera2CameraControlHostApi,
  TestInstanceManagerHostApi
])
void main() {
  TestWidgetsFlutterBinding.ensureInitialized();

  // Mocks the call to clear the native InstanceManager.
  TestInstanceManagerHostApi.setup(MockTestInstanceManagerHostApi());

  group('Camera2CameraControl', () {
    // Reset the stubbed host APIs after each test so mock state does not
    // leak between cases.
    tearDown(() {
      TestCamera2CameraControlHostApi.setup(null);
      TestInstanceManagerHostApi.setup(null);
    });

    test('detached create does not call create on the Java side', () {
      final MockTestCamera2CameraControlHostApi mockApi =
          MockTestCamera2CameraControlHostApi();
      TestCamera2CameraControlHostApi.setup(mockApi);
      final InstanceManager instanceManager = InstanceManager(
        onWeakReferenceRemoved: (_) {},
      );

      // A detached constructor must not trigger a native-side create call.
      Camera2CameraControl.detached(
        cameraControl: MockCameraControl(),
        instanceManager: instanceManager,
      );

      verifyNever(mockApi.create(argThat(isA<int>()), argThat(isA<int>())));
    });

    test('create calls create on the Java side', () {
      final MockTestCamera2CameraControlHostApi mockApi =
          MockTestCamera2CameraControlHostApi();
      TestCamera2CameraControlHostApi.setup(mockApi);
      final InstanceManager instanceManager = InstanceManager(
        onWeakReferenceRemoved: (_) {},
      );

      // Pre-register the CameraControl so its identifier can be verified in
      // the expected create call.
      final CameraControl mockCameraControl = MockCameraControl();
      const int cameraControlIdentifier = 9;
      instanceManager.addHostCreatedInstance(
        mockCameraControl,
        cameraControlIdentifier,
        onCopy: (_) => CameraControl.detached(
          instanceManager: instanceManager,
        ),
      );

      final Camera2CameraControl instance = Camera2CameraControl(
        cameraControl: mockCameraControl,
        instanceManager: instanceManager,
      );

      verify(mockApi.create(
        instanceManager.getIdentifier(instance),
        cameraControlIdentifier,
      ));
    });

    test(
        'addCaptureRequestOptions makes call on Java side to add capture request options',
        () async {
      final MockTestCamera2CameraControlHostApi mockApi =
          MockTestCamera2CameraControlHostApi();
      TestCamera2CameraControlHostApi.setup(mockApi);
      final InstanceManager instanceManager = InstanceManager(
        onWeakReferenceRemoved: (_) {},
      );

      // Register the control under a known identifier so the host-side call
      // can be verified against it.
      final Camera2CameraControl instance = Camera2CameraControl.detached(
        cameraControl: MockCameraControl(),
        instanceManager: instanceManager,
      );
      const int instanceIdentifier = 30;
      instanceManager.addHostCreatedInstance(
        instance,
        instanceIdentifier,
        onCopy: (Camera2CameraControl original) =>
            Camera2CameraControl.detached(
          cameraControl: original.cameraControl,
          instanceManager: instanceManager,
        ),
      );

      // Register the options under a known identifier as well.
      final CaptureRequestOptions mockCaptureRequestOptions =
          MockCaptureRequestOptions();
      const int mockCaptureRequestOptionsIdentifier = 8;
      instanceManager.addHostCreatedInstance(
        mockCaptureRequestOptions,
        mockCaptureRequestOptionsIdentifier,
        onCopy: (_) => MockCaptureRequestOptions(),
      );

      await instance.addCaptureRequestOptions(
        mockCaptureRequestOptions,
      );

      verify(mockApi.addCaptureRequestOptions(
        instanceIdentifier,
        mockCaptureRequestOptionsIdentifier,
      ));
    });
  });
}
| packages/packages/camera/camera_android_camerax/test/camera2_camera_control_test.dart/0 | {
"file_path": "packages/packages/camera/camera_android_camerax/test/camera2_camera_control_test.dart",
"repo_id": "packages",
"token_count": 1498
} | 979 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:camera_android_camerax/src/device_orientation_manager.dart';
import 'package:camera_android_camerax/src/surface.dart';
import 'package:camera_platform_interface/camera_platform_interface.dart'
show DeviceOrientationChangedEvent;
import 'package:flutter/services.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:mockito/annotations.dart';
import 'package:mockito/mockito.dart';
import 'device_orientation_manager_test.mocks.dart';
import 'test_camerax_library.g.dart';
@GenerateMocks(
    <Type>[TestInstanceManagerHostApi, TestDeviceOrientationManagerHostApi])
void main() {
  TestWidgetsFlutterBinding.ensureInitialized();

  // Mocks the call to clear the native InstanceManager.
  TestInstanceManagerHostApi.setup(MockTestInstanceManagerHostApi());

  group('DeviceOrientationManager', () {
    // Reset the host API that these tests actually stub so mock state does
    // not leak between cases. (Previously this reset
    // TestProcessCameraProviderHostApi, which this file never sets up.)
    tearDown(() => TestDeviceOrientationManagerHostApi.setup(null));

    test(
        'startListeningForDeviceOrientationChange makes request to start listening for new device orientations',
        () async {
      final MockTestDeviceOrientationManagerHostApi mockApi =
          MockTestDeviceOrientationManagerHostApi();
      TestDeviceOrientationManagerHostApi.setup(mockApi);

      DeviceOrientationManager.startListeningForDeviceOrientationChange(
          true, 90);
      verify(mockApi.startListeningForDeviceOrientationChange(true, 90));
    });

    test(
        'stopListeningForDeviceOrientationChange makes request to stop listening for new device orientations',
        () async {
      final MockTestDeviceOrientationManagerHostApi mockApi =
          MockTestDeviceOrientationManagerHostApi();
      TestDeviceOrientationManagerHostApi.setup(mockApi);

      DeviceOrientationManager.stopListeningForDeviceOrientationChange();
      verify(mockApi.stopListeningForDeviceOrientationChange());
    });

    test('getDefaultDisplayRotation retrieves expected rotation', () async {
      final MockTestDeviceOrientationManagerHostApi mockApi =
          MockTestDeviceOrientationManagerHostApi();
      TestDeviceOrientationManagerHostApi.setup(mockApi);
      const int expectedRotation = Surface.ROTATION_180;

      when(mockApi.getDefaultDisplayRotation()).thenReturn(expectedRotation);

      expect(await DeviceOrientationManager.getDefaultDisplayRotation(),
          equals(expectedRotation));
      verify(mockApi.getDefaultDisplayRotation());
    });

    test('onDeviceOrientationChanged adds new orientation to stream', () {
      DeviceOrientationManager.deviceOrientationChangedStreamController.stream
          .listen((DeviceOrientationChangedEvent event) {
        expect(event.orientation, equals(DeviceOrientation.landscapeLeft));
      });
      DeviceOrientationManagerFlutterApiImpl()
          .onDeviceOrientationChanged('LANDSCAPE_LEFT');
    });

    test(
        'onDeviceOrientationChanged throws error if new orientation is invalid',
        () {
      expect(
          () => DeviceOrientationManagerFlutterApiImpl()
              .onDeviceOrientationChanged('FAKE_ORIENTATION'),
          throwsA(isA<ArgumentError>().having(
              (ArgumentError e) => e.message,
              'message',
              '"FAKE_ORIENTATION" is not a valid DeviceOrientation value')));
    });
  });
}
| packages/packages/camera/camera_android_camerax/test/device_orientation_manager_test.dart/0 | {
"file_path": "packages/packages/camera/camera_android_camerax/test/device_orientation_manager_test.dart",
"repo_id": "packages",
"token_count": 1223
} | 980 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:camera_android_camerax/src/instance_manager.dart';
import 'package:flutter_test/flutter_test.dart';
void main() {
  group('InstanceManager', () {
    // Convenience builder for a manager whose weak-reference callback is a
    // no-op; tests that care about the callback construct their own manager.
    InstanceManager createManager() =>
        InstanceManager(onWeakReferenceRemoved: (_) {});

    test('addHostCreatedInstance', () {
      final InstanceManager manager = createManager();
      final Object instance = Object();

      manager.addHostCreatedInstance(instance, 0, onCopy: (_) => Object());

      expect(manager.getIdentifier(instance), 0);
      expect(manager.getInstanceWithWeakReference(0), instance);
    });

    test('addHostCreatedInstance prevents already used objects and ids', () {
      final InstanceManager manager = createManager();
      final Object instance = Object();
      manager.addHostCreatedInstance(instance, 0, onCopy: (_) => Object());

      // Registering the same object twice is an error.
      expect(
        () => manager.addHostCreatedInstance(
          instance,
          0,
          onCopy: (_) => Object(),
        ),
        throwsAssertionError,
      );

      // Reusing an identifier for a different object is also an error.
      expect(
        () => manager.addHostCreatedInstance(
          Object(),
          0,
          onCopy: (_) => Object(),
        ),
        throwsAssertionError,
      );
    });

    test('addDartCreatedInstance', () {
      final InstanceManager manager = createManager();
      final Object instance = Object();

      manager.addDartCreatedInstance(instance, onCopy: (_) => Object());

      final int? identifier = manager.getIdentifier(instance);
      expect(identifier, isNotNull);
      expect(manager.getInstanceWithWeakReference(identifier!), instance);
    });

    test('removeWeakReference', () {
      final Object instance = Object();
      int? removedIdentifier;
      final InstanceManager manager = InstanceManager(
        onWeakReferenceRemoved: (int identifier) {
          removedIdentifier = identifier;
        },
      );
      manager.addHostCreatedInstance(instance, 0, onCopy: (_) => Object());

      expect(manager.removeWeakReference(instance), 0);
      // The identifier still resolves (to the retained copy).
      expect(manager.getInstanceWithWeakReference(0), isA<Object>());
      // The removal callback received the identifier.
      expect(removedIdentifier, 0);
    });

    test('removeWeakReference removes only weak reference', () {
      final InstanceManager manager = createManager();
      final Object instance = Object();
      manager.addHostCreatedInstance(instance, 0, onCopy: (_) => Object());

      expect(manager.removeWeakReference(instance), 0);

      // After the weak reference is gone, lookups return the strong copy,
      // which is a distinct object.
      final Object retained = manager.getInstanceWithWeakReference(0)!;
      expect(identical(instance, retained), isFalse);
    });

    test('removeStrongReference', () {
      final InstanceManager manager = createManager();
      final Object instance = Object();
      manager.addHostCreatedInstance(instance, 0, onCopy: (_) => Object());
      manager.removeWeakReference(instance);

      expect(manager.remove(0), isA<Object>());
      expect(manager.containsIdentifier(0), isFalse);
    });

    test('removeStrongReference removes only strong reference', () {
      final InstanceManager manager = createManager();
      final Object instance = Object();
      manager.addHostCreatedInstance(instance, 0, onCopy: (_) => Object());

      expect(manager.remove(0), isA<Object>());
      // The weak reference still resolves to the original object.
      expect(manager.getInstanceWithWeakReference(0), instance);
    });

    test('getInstance can add a new weak reference', () {
      final InstanceManager manager = createManager();
      final Object instance = Object();
      manager.addHostCreatedInstance(instance, 0, onCopy: (_) => Object());
      manager.removeWeakReference(instance);

      // Looking up the identifier re-establishes a weak reference to a copy.
      final Object newWeakCopy = manager.getInstanceWithWeakReference(0)!;
      expect(identical(instance, newWeakCopy), isFalse);
    });
  });
}
| packages/packages/camera/camera_android_camerax/test/instance_manager_test.dart/0 | {
"file_path": "packages/packages/camera/camera_android_camerax/test/instance_manager_test.dart",
"repo_id": "packages",
"token_count": 1822
} | 981 |
// Mocks generated by Mockito 5.4.4 from annotations
// in camera_android_camerax/test/quality_selector_test.dart.
// Do not manually edit this file.
// ignore_for_file: no_leading_underscores_for_library_prefixes
import 'dart:async' as _i6;
import 'package:camera_android_camerax/src/camera_info.dart' as _i5;
import 'package:camera_android_camerax/src/camera_state.dart' as _i7;
import 'package:camera_android_camerax/src/camerax_library.g.dart' as _i4;
import 'package:camera_android_camerax/src/exposure_state.dart' as _i3;
import 'package:camera_android_camerax/src/fallback_strategy.dart' as _i9;
import 'package:camera_android_camerax/src/live_data.dart' as _i2;
import 'package:camera_android_camerax/src/zoom_state.dart' as _i8;
import 'package:mockito/mockito.dart' as _i1;
import 'test_camerax_library.g.dart' as _i10;
// ignore_for_file: type=lint
// ignore_for_file: avoid_redundant_argument_values
// ignore_for_file: avoid_setters_without_getters
// ignore_for_file: comment_references
// ignore_for_file: deprecated_member_use
// ignore_for_file: deprecated_member_use_from_same_package
// ignore_for_file: implementation_imports
// ignore_for_file: invalid_use_of_visible_for_testing_member
// ignore_for_file: prefer_const_constructors
// ignore_for_file: unnecessary_parenthesis
// ignore_for_file: camel_case_types
// ignore_for_file: subtype_of_sealed_class
// Mockito `SmartFake` stand-in: returned by un-stubbed mock members whose
// return type is a [LiveData], so null safety is satisfied without a stub.
class _FakeLiveData_0<T extends Object> extends _i1.SmartFake
    implements _i2.LiveData<T> {
  _FakeLiveData_0(
    Object parent,
    Invocation parentInvocation,
  ) : super(
          parent,
          parentInvocation,
        );
}
// Mockito `SmartFake` stand-in for members that must return an
// [ExposureState] when no stub is configured.
class _FakeExposureState_1 extends _i1.SmartFake implements _i3.ExposureState {
  _FakeExposureState_1(
    Object parent,
    Invocation parentInvocation,
  ) : super(
          parent,
          parentInvocation,
        );
}
// Mockito `SmartFake` stand-in for members that must return a
// [ResolutionInfo] when no stub is configured.
class _FakeResolutionInfo_2 extends _i1.SmartFake
    implements _i4.ResolutionInfo {
  _FakeResolutionInfo_2(
    Object parent,
    Invocation parentInvocation,
  ) : super(
          parent,
          parentInvocation,
        );
}
/// A class which mocks [CameraInfo].
///
/// See the documentation for Mockito's code generation for more information.
// ignore: must_be_immutable
/// A class which mocks [CameraInfo].
///
/// See the documentation for Mockito's code generation for more information.
/// Constructed with [_i1.throwOnMissingStub], so calls without a matching
/// stub throw; the `returnValue` fakes below satisfy null safety.
// ignore: must_be_immutable
class MockCameraInfo extends _i1.Mock implements _i5.CameraInfo {
  MockCameraInfo() {
    _i1.throwOnMissingStub(this);
  }

  @override
  _i6.Future<int> getSensorRotationDegrees() => (super.noSuchMethod(
        Invocation.method(
          #getSensorRotationDegrees,
          [],
        ),
        returnValue: _i6.Future<int>.value(0),
      ) as _i6.Future<int>);

  @override
  _i6.Future<_i2.LiveData<_i7.CameraState>> getCameraState() =>
      (super.noSuchMethod(
        Invocation.method(
          #getCameraState,
          [],
        ),
        returnValue: _i6.Future<_i2.LiveData<_i7.CameraState>>.value(
            _FakeLiveData_0<_i7.CameraState>(
          this,
          Invocation.method(
            #getCameraState,
            [],
          ),
        )),
      ) as _i6.Future<_i2.LiveData<_i7.CameraState>>);

  @override
  _i6.Future<_i3.ExposureState> getExposureState() => (super.noSuchMethod(
        Invocation.method(
          #getExposureState,
          [],
        ),
        returnValue: _i6.Future<_i3.ExposureState>.value(_FakeExposureState_1(
          this,
          Invocation.method(
            #getExposureState,
            [],
          ),
        )),
      ) as _i6.Future<_i3.ExposureState>);

  @override
  _i6.Future<_i2.LiveData<_i8.ZoomState>> getZoomState() => (super.noSuchMethod(
        Invocation.method(
          #getZoomState,
          [],
        ),
        returnValue: _i6.Future<_i2.LiveData<_i8.ZoomState>>.value(
            _FakeLiveData_0<_i8.ZoomState>(
          this,
          Invocation.method(
            #getZoomState,
            [],
          ),
        )),
      ) as _i6.Future<_i2.LiveData<_i8.ZoomState>>);
}
/// A class which mocks [FallbackStrategy].
///
/// See the documentation for Mockito's code generation for more information.
// ignore: must_be_immutable
/// A class which mocks [FallbackStrategy].
///
/// See the documentation for Mockito's code generation for more information.
/// Un-stubbed getter reads fall back to `VideoQuality.SD` /
/// `higherQualityOrLowerThan` as declared below.
// ignore: must_be_immutable
class MockFallbackStrategy extends _i1.Mock implements _i9.FallbackStrategy {
  MockFallbackStrategy() {
    _i1.throwOnMissingStub(this);
  }

  @override
  _i4.VideoQuality get quality => (super.noSuchMethod(
        Invocation.getter(#quality),
        returnValue: _i4.VideoQuality.SD,
      ) as _i4.VideoQuality);

  @override
  _i4.VideoResolutionFallbackRule get fallbackRule => (super.noSuchMethod(
        Invocation.getter(#fallbackRule),
        returnValue: _i4.VideoResolutionFallbackRule.higherQualityOrLowerThan,
      ) as _i4.VideoResolutionFallbackRule);
}
/// A class which mocks [TestQualitySelectorHostApi].
///
/// See the documentation for Mockito's code generation for more information.
/// A class which mocks [TestQualitySelectorHostApi].
///
/// See the documentation for Mockito's code generation for more information.
/// Used by tests to stand in for the native host side of the
/// QualitySelector pigeon channel.
class MockTestQualitySelectorHostApi extends _i1.Mock
    implements _i10.TestQualitySelectorHostApi {
  MockTestQualitySelectorHostApi() {
    _i1.throwOnMissingStub(this);
  }

  @override
  void create(
    int? identifier,
    List<_i4.VideoQualityData?>? videoQualityDataList,
    int? fallbackStrategyId,
  ) =>
      super.noSuchMethod(
        Invocation.method(
          #create,
          [
            identifier,
            videoQualityDataList,
            fallbackStrategyId,
          ],
        ),
        returnValueForMissingStub: null,
      );

  @override
  _i4.ResolutionInfo getResolution(
    int? cameraInfoId,
    _i4.VideoQuality? quality,
  ) =>
      (super.noSuchMethod(
        Invocation.method(
          #getResolution,
          [
            cameraInfoId,
            quality,
          ],
        ),
        returnValue: _FakeResolutionInfo_2(
          this,
          Invocation.method(
            #getResolution,
            [
              cameraInfoId,
              quality,
            ],
          ),
        ),
      ) as _i4.ResolutionInfo);
}
/// A class which mocks [TestInstanceManagerHostApi].
///
/// See the documentation for Mockito's code generation for more information.
/// A class which mocks [TestInstanceManagerHostApi].
///
/// See the documentation for Mockito's code generation for more information.
/// `clear` is stubbed as a no-op by default (`returnValueForMissingStub`).
class MockTestInstanceManagerHostApi extends _i1.Mock
    implements _i10.TestInstanceManagerHostApi {
  MockTestInstanceManagerHostApi() {
    _i1.throwOnMissingStub(this);
  }

  @override
  void clear() => super.noSuchMethod(
        Invocation.method(
          #clear,
          [],
        ),
        returnValueForMissingStub: null,
      );
}
| packages/packages/camera/camera_android_camerax/test/quality_selector_test.mocks.dart/0 | {
"file_path": "packages/packages/camera/camera_android_camerax/test/quality_selector_test.mocks.dart",
"repo_id": "packages",
"token_count": 2818
} | 982 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@import camera_avfoundation.Test;
@import AVFoundation;
@import XCTest;
@interface CameraPropertiesTests : XCTestCase
@end

/// Unit tests for the free functions that convert between Dart-side string
/// constants and the plugin's enum/AVFoundation values. Each test is named
/// after the function it exercises and covers the invalid-input fallback.
@implementation CameraPropertiesTests

#pragma mark - flash mode tests

- (void)testFLTGetFLTFlashModeForString {
  XCTAssertEqual(FLTFlashModeOff, FLTGetFLTFlashModeForString(@"off"));
  XCTAssertEqual(FLTFlashModeAuto, FLTGetFLTFlashModeForString(@"auto"));
  XCTAssertEqual(FLTFlashModeAlways, FLTGetFLTFlashModeForString(@"always"));
  XCTAssertEqual(FLTFlashModeTorch, FLTGetFLTFlashModeForString(@"torch"));
  XCTAssertEqual(FLTFlashModeInvalid, FLTGetFLTFlashModeForString(@"unknown"));
}

- (void)testFLTGetAVCaptureFlashModeForFLTFlashMode {
  XCTAssertEqual(AVCaptureFlashModeOff, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeOff));
  XCTAssertEqual(AVCaptureFlashModeAuto, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeAuto));
  XCTAssertEqual(AVCaptureFlashModeOn, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeAlways));
  // Torch has no AVCaptureFlashMode equivalent; -1 is the sentinel.
  XCTAssertEqual(-1, FLTGetAVCaptureFlashModeForFLTFlashMode(FLTFlashModeTorch));
}

#pragma mark - exposure mode tests

- (void)testFLTGetStringForFLTExposureMode {
  XCTAssertEqualObjects(@"auto", FLTGetStringForFLTExposureMode(FLTExposureModeAuto));
  XCTAssertEqualObjects(@"locked", FLTGetStringForFLTExposureMode(FLTExposureModeLocked));
  XCTAssertNil(FLTGetStringForFLTExposureMode(-1));
}

- (void)testFLTGetFLTExposureModeForString {
  XCTAssertEqual(FLTExposureModeAuto, FLTGetFLTExposureModeForString(@"auto"));
  XCTAssertEqual(FLTExposureModeLocked, FLTGetFLTExposureModeForString(@"locked"));
  XCTAssertEqual(FLTExposureModeInvalid, FLTGetFLTExposureModeForString(@"unknown"));
}

#pragma mark - focus mode tests

- (void)testFLTGetStringForFLTFocusMode {
  XCTAssertEqualObjects(@"auto", FLTGetStringForFLTFocusMode(FLTFocusModeAuto));
  XCTAssertEqualObjects(@"locked", FLTGetStringForFLTFocusMode(FLTFocusModeLocked));
  XCTAssertNil(FLTGetStringForFLTFocusMode(-1));
}

- (void)testFLTGetFLTFocusModeForString {
  XCTAssertEqual(FLTFocusModeAuto, FLTGetFLTFocusModeForString(@"auto"));
  XCTAssertEqual(FLTFocusModeLocked, FLTGetFLTFocusModeForString(@"locked"));
  XCTAssertEqual(FLTFocusModeInvalid, FLTGetFLTFocusModeForString(@"unknown"));
}

#pragma mark - resolution preset tests

- (void)testFLTGetFLTResolutionPresetForString {
  XCTAssertEqual(FLTResolutionPresetVeryLow, FLTGetFLTResolutionPresetForString(@"veryLow"));
  XCTAssertEqual(FLTResolutionPresetLow, FLTGetFLTResolutionPresetForString(@"low"));
  XCTAssertEqual(FLTResolutionPresetMedium, FLTGetFLTResolutionPresetForString(@"medium"));
  XCTAssertEqual(FLTResolutionPresetHigh, FLTGetFLTResolutionPresetForString(@"high"));
  XCTAssertEqual(FLTResolutionPresetVeryHigh, FLTGetFLTResolutionPresetForString(@"veryHigh"));
  XCTAssertEqual(FLTResolutionPresetUltraHigh, FLTGetFLTResolutionPresetForString(@"ultraHigh"));
  XCTAssertEqual(FLTResolutionPresetMax, FLTGetFLTResolutionPresetForString(@"max"));
  XCTAssertEqual(FLTResolutionPresetInvalid, FLTGetFLTResolutionPresetForString(@"unknown"));
}

#pragma mark - video format tests

- (void)testFLTGetVideoFormatFromString {
  XCTAssertEqual(kCVPixelFormatType_32BGRA, FLTGetVideoFormatFromString(@"bgra8888"));
  XCTAssertEqual(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                 FLTGetVideoFormatFromString(@"yuv420"));
  // Unrecognized strings fall back to BGRA.
  XCTAssertEqual(kCVPixelFormatType_32BGRA, FLTGetVideoFormatFromString(@"unknown"));
}

#pragma mark - device orientation tests

- (void)testFLTGetUIDeviceOrientationForString {
  XCTAssertEqual(UIDeviceOrientationPortraitUpsideDown,
                 FLTGetUIDeviceOrientationForString(@"portraitDown"));
  XCTAssertEqual(UIDeviceOrientationLandscapeLeft,
                 FLTGetUIDeviceOrientationForString(@"landscapeLeft"));
  XCTAssertEqual(UIDeviceOrientationLandscapeRight,
                 FLTGetUIDeviceOrientationForString(@"landscapeRight"));
  XCTAssertEqual(UIDeviceOrientationPortrait, FLTGetUIDeviceOrientationForString(@"portraitUp"));
  XCTAssertEqual(UIDeviceOrientationUnknown, FLTGetUIDeviceOrientationForString(@"unknown"));
}

- (void)testFLTGetStringForUIDeviceOrientation {
  XCTAssertEqualObjects(@"portraitDown",
                        FLTGetStringForUIDeviceOrientation(UIDeviceOrientationPortraitUpsideDown));
  XCTAssertEqualObjects(@"landscapeLeft",
                        FLTGetStringForUIDeviceOrientation(UIDeviceOrientationLandscapeLeft));
  XCTAssertEqualObjects(@"landscapeRight",
                        FLTGetStringForUIDeviceOrientation(UIDeviceOrientationLandscapeRight));
  XCTAssertEqualObjects(@"portraitUp",
                        FLTGetStringForUIDeviceOrientation(UIDeviceOrientationPortrait));
  // Out-of-range values map to the portrait-up default.
  XCTAssertEqualObjects(@"portraitUp", FLTGetStringForUIDeviceOrientation(-1));
}

#pragma mark - file format tests

// Renamed from testFLTGetFileFormatForString so the test name matches the
// function under test (FCPGetFileFormatFromString), consistent with the
// naming of every other test in this file.
- (void)testFCPGetFileFormatFromString {
  XCTAssertEqual(FCPFileFormatJPEG, FCPGetFileFormatFromString(@"jpg"));
  XCTAssertEqual(FCPFileFormatHEIF, FCPGetFileFormatFromString(@"heif"));
  XCTAssertEqual(FCPFileFormatInvalid, FCPGetFileFormatFromString(@"unknown"));
}

@end
| packages/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m/0 | {
"file_path": "packages/packages/camera/camera_avfoundation/example/ios/RunnerTests/CameraPropertiesTests.m",
"repo_id": "packages",
"token_count": 2057
} | 983 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#import "FLTCam.h"
#import "FLTCam_Test.h"
#import "FLTSavePhotoDelegate.h"
#import "QueueUtils.h"
@import CoreMotion;
#import <libkern/OSAtomic.h>
@implementation FLTImageStreamHandler

- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue {
  self = [super init];
  NSAssert(self, @"super init cannot be nil");
  _captureSessionQueue = captureSessionQueue;
  return self;
}

- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments
                                       eventSink:(nonnull FlutterEventSink)events {
  // The event sink is only mutated on the capture session queue; publish the
  // new sink there. A weak reference avoids retaining self in the block.
  __weak typeof(self) handler = self;
  dispatch_async(self.captureSessionQueue, ^{
    handler.eventSink = events;
  });
  return nil;
}

- (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments {
  // Clear the sink on the capture session queue, mirroring onListen.
  __weak typeof(self) handler = self;
  dispatch_async(self.captureSessionQueue, ^{
    handler.eventSink = nil;
  });
  return nil;
}

@end
@interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
                      AVCaptureAudioDataOutputSampleBufferDelegate>

/// Identifier of this camera instance (read-only).
@property(readonly, nonatomic) int64_t textureId;
/// Whether audio capture was requested at initialization.
@property BOOL enableAudio;
/// Handler whose event sink receives streamed camera frames.
@property(nonatomic) FLTImageStreamHandler *imageStreamHandler;
/// Session carrying the video input/output; started and stopped together with
/// the audio session.
@property(readonly, nonatomic) AVCaptureSession *videoCaptureSession;
/// Session for audio capture; its preset mirrors the video session's.
@property(readonly, nonatomic) AVCaptureSession *audioCaptureSession;
/// Device input feeding `videoCaptureSession`.
@property(readonly, nonatomic) AVCaptureInput *captureVideoInput;
/// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback.
/// Used to deliver the latest pixel buffer to the flutter engine via the `copyPixelBuffer` API.
@property(readwrite, nonatomic) CVPixelBufferRef latestPixelBuffer;
@property(readonly, nonatomic) CGSize captureSize;
// AVAssetWriter pipeline used while recording video (writer, per-track
// inputs, and the pixel-buffer adaptor).
@property(strong, nonatomic) AVAssetWriter *videoWriter;
@property(strong, nonatomic) AVAssetWriterInput *videoWriterInput;
@property(strong, nonatomic) AVAssetWriterInput *audioWriterInput;
@property(strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferAdaptor;
@property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;
@property(strong, nonatomic) AVCaptureAudioDataOutput *audioOutput;
/// Destination path of the recording in progress, if any.
@property(strong, nonatomic) NSString *videoRecordingPath;
// Recording state flags.
@property(assign, nonatomic) BOOL isRecording;
@property(assign, nonatomic) BOOL isRecordingPaused;
@property(assign, nonatomic) BOOL videoIsDisconnected;
@property(assign, nonatomic) BOOL audioIsDisconnected;
@property(assign, nonatomic) BOOL isAudioSetup;

/// Number of frames currently pending processing.
@property(assign, nonatomic) int streamingPendingFramesCount;

/// Maximum number of frames pending processing.
@property(assign, nonatomic) int maxStreamingPendingFramesCount;

/// Orientation to use while locked; UIDeviceOrientationUnknown means unlocked.
@property(assign, nonatomic) UIDeviceOrientation lockedCaptureOrientation;
// Timing bookkeeping for pause/resume of recordings.
@property(assign, nonatomic) CMTime lastVideoSampleTime;
@property(assign, nonatomic) CMTime lastAudioSampleTime;
@property(assign, nonatomic) CMTime videoTimeOffset;
@property(assign, nonatomic) CMTime audioTimeOffset;
/// Motion manager; accelerometer updates are started in the designated
/// initializer.
@property(nonatomic) CMMotionManager *motionManager;
@property AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;

/// All FLTCam's state access and capture session related operations should be on run on this queue.
@property(strong, nonatomic) dispatch_queue_t captureSessionQueue;

/// The queue on which `latestPixelBuffer` property is accessed.
/// To avoid unnecessary contention, do not access `latestPixelBuffer` on the `captureSessionQueue`.
@property(strong, nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue;

/// The queue on which captured photos (not videos) are written to disk.
/// Videos are written to disk by `videoAdaptor` on an internal queue managed by AVFoundation.
@property(strong, nonatomic) dispatch_queue_t photoIOQueue;

/// Last known physical device orientation.
@property(assign, nonatomic) UIDeviceOrientation deviceOrientation;

/// A wrapper for CMVideoFormatDescriptionGetDimensions.
/// Allows for alternate implementations in tests.
@property(nonatomic, copy) VideoDimensionsForFormat videoDimensionsForFormat;

/// A wrapper for AVCaptureDevice creation to allow for dependency injection in tests.
@property(nonatomic, copy) CaptureDeviceFactory captureDeviceFactory;

@end
@implementation FLTCam
NSString *const errorMethod = @"error";
/// Convenience initializer that creates fresh video and audio
/// AVCaptureSessions before delegating to the session-injecting initializer.
- (instancetype)initWithCameraName:(NSString *)cameraName
                  resolutionPreset:(NSString *)resolutionPreset
                       enableAudio:(BOOL)enableAudio
                       orientation:(UIDeviceOrientation)orientation
               captureSessionQueue:(dispatch_queue_t)captureSessionQueue
                             error:(NSError **)error {
  return [self initWithCameraName:cameraName
                 resolutionPreset:resolutionPreset
                      enableAudio:enableAudio
                      orientation:orientation
              videoCaptureSession:[[AVCaptureSession alloc] init]
              audioCaptureSession:[[AVCaptureSession alloc] init]
              captureSessionQueue:captureSessionQueue
                            error:error];
}
/// Convenience initializer that resolves the capture device from `cameraName`
/// and supplies default device/format factories before delegating to the
/// designated initializer.
- (instancetype)initWithCameraName:(NSString *)cameraName
                  resolutionPreset:(NSString *)resolutionPreset
                       enableAudio:(BOOL)enableAudio
                       orientation:(UIDeviceOrientation)orientation
               videoCaptureSession:(AVCaptureSession *)videoCaptureSession
               audioCaptureSession:(AVCaptureSession *)audioCaptureSession
               captureSessionQueue:(dispatch_queue_t)captureSessionQueue
                             error:(NSError **)error {
  // Bug fix: previously the video session was also passed for the
  // audioCaptureSession: parameter, so the injected audio session was
  // silently discarded and audio was configured on the video session.
  return [self initWithResolutionPreset:resolutionPreset
                            enableAudio:enableAudio
                            orientation:orientation
                    videoCaptureSession:videoCaptureSession
                    audioCaptureSession:audioCaptureSession
                    captureSessionQueue:captureSessionQueue
                   captureDeviceFactory:^AVCaptureDevice *(void) {
                     return [AVCaptureDevice deviceWithUniqueID:cameraName];
                   }
               videoDimensionsForFormat:^CMVideoDimensions(AVCaptureDeviceFormat *format) {
                 return CMVideoFormatDescriptionGetDimensions(format.formatDescription);
               }
                                  error:error];
}
/// Designated initializer.
///
/// Returns nil and populates `error` when the resolution preset string is not
/// recognized, when the capture connection cannot be created, or when no
/// session preset can be applied.
/// NOTE(review): `*error` is dereferenced unconditionally on failure, so
/// callers are assumed to always pass a non-NULL out-parameter — confirm.
- (instancetype)initWithResolutionPreset:(NSString *)resolutionPreset
                             enableAudio:(BOOL)enableAudio
                             orientation:(UIDeviceOrientation)orientation
                     videoCaptureSession:(AVCaptureSession *)videoCaptureSession
                     audioCaptureSession:(AVCaptureSession *)audioCaptureSession
                     captureSessionQueue:(dispatch_queue_t)captureSessionQueue
                    captureDeviceFactory:(CaptureDeviceFactory)captureDeviceFactory
                videoDimensionsForFormat:(VideoDimensionsForFormat)videoDimensionsForFormat
                                   error:(NSError **)error {
  self = [super init];
  NSAssert(self, @"super init cannot be nil");
  // Validate the preset string before any session work.
  _resolutionPreset = FLTGetFLTResolutionPresetForString(resolutionPreset);
  if (_resolutionPreset == FLTResolutionPresetInvalid) {
    *error = [NSError
        errorWithDomain:NSCocoaErrorDomain
                   code:NSURLErrorUnknown
               userInfo:@{
                 NSLocalizedDescriptionKey :
                     [NSString stringWithFormat:@"Unknown resolution preset %@", resolutionPreset]
               }];
    return nil;
  }
  _enableAudio = enableAudio;
  _captureSessionQueue = captureSessionQueue;
  // Dedicated serial queues: one guarding latestPixelBuffer, one for photo
  // disk I/O.
  _pixelBufferSynchronizationQueue =
      dispatch_queue_create("io.flutter.camera.pixelBufferSynchronizationQueue", NULL);
  _photoIOQueue = dispatch_queue_create("io.flutter.camera.photoIOQueue", NULL);
  _videoCaptureSession = videoCaptureSession;
  _audioCaptureSession = audioCaptureSession;
  _captureDeviceFactory = captureDeviceFactory;
  _captureDevice = captureDeviceFactory();
  _videoDimensionsForFormat = videoDimensionsForFormat;
  // Default modes; flash defaults to auto only when the device has a flash.
  _flashMode = _captureDevice.hasFlash ? FLTFlashModeAuto : FLTFlashModeOff;
  _exposureMode = FLTExposureModeAuto;
  _focusMode = FLTFocusModeAuto;
  _lockedCaptureOrientation = UIDeviceOrientationUnknown;
  _deviceOrientation = orientation;
  _videoFormat = kCVPixelFormatType_32BGRA;
  _inProgressSavePhotoDelegates = [NSMutableDictionary dictionary];
  _fileFormat = FCPFileFormatJPEG;

  // To limit memory consumption, limit the number of frames pending processing.
  // After some testing, 4 was determined to be the best maximum value.
  // https://github.com/flutter/plugins/pull/4520#discussion_r766335637
  _maxStreamingPendingFramesCount = 4;

  NSError *localError = nil;
  AVCaptureConnection *connection = [self createConnection:&localError];
  if (localError) {
    *error = localError;
    return nil;
  }

  // Wire input/output/connection manually so the connection can be mirrored
  // for front cameras (see createConnection:).
  [_videoCaptureSession addInputWithNoConnections:_captureVideoInput];
  [_videoCaptureSession addOutputWithNoConnections:_captureVideoOutput];
  [_videoCaptureSession addConnection:connection];

  _capturePhotoOutput = [AVCapturePhotoOutput new];
  [_capturePhotoOutput setHighResolutionCaptureEnabled:YES];
  [_videoCaptureSession addOutput:_capturePhotoOutput];
  _motionManager = [[CMMotionManager alloc] init];
  [_motionManager startAccelerometerUpdates];

  if (![self setCaptureSessionPreset:_resolutionPreset withError:error]) {
    return nil;
  }
  [self updateOrientation];

  return self;
}
/// Creates the video device input, the video data output, and the connection
/// joining them.
///
/// Returns nil and populates `error` when the device input cannot be created.
/// The connection is mirrored for front-facing cameras.
- (AVCaptureConnection *)createConnection:(NSError **)error {
  // Setup video capture input.
  _captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:error];

  if (*error) {
    return nil;
  }

  // Setup video capture output.
  _captureVideoOutput = [AVCaptureVideoDataOutput new];
  _captureVideoOutput.videoSettings =
      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)};
  [_captureVideoOutput setAlwaysDiscardsLateVideoFrames:YES];
  // Sample buffers are delivered on the capture session queue.
  [_captureVideoOutput setSampleBufferDelegate:self queue:_captureSessionQueue];

  // Setup video capture connection.
  AVCaptureConnection *connection =
      [AVCaptureConnection connectionWithInputPorts:_captureVideoInput.ports
                                             output:_captureVideoOutput];

  if ([_captureDevice position] == AVCaptureDevicePositionFront) {
    connection.videoMirrored = YES;
  }

  return connection;
}
/// Starts both the video and audio capture sessions.
- (void)start {
  [_videoCaptureSession startRunning];
  [_audioCaptureSession startRunning];
}
/// Stops both the video and audio capture sessions.
- (void)stop {
  [_videoCaptureSession stopRunning];
  [_audioCaptureSession stopRunning];
}
/// Sets the pixel format and applies it to the video data output's settings.
- (void)setVideoFormat:(OSType)videoFormat {
  _videoFormat = videoFormat;
  _captureVideoOutput.videoSettings =
      @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(videoFormat)};
}
/// Sets the file format used for subsequent still-photo captures.
- (void)setImageFileFormat:(FCPFileFormat)fileFormat {
  _fileFormat = fileFormat;
}
/// Records the new device orientation and reorients the capture outputs,
/// skipping all work when the orientation is unchanged.
- (void)setDeviceOrientation:(UIDeviceOrientation)orientation {
  if (_deviceOrientation == orientation) {
    return;
  }

  _deviceOrientation = orientation;
  [self updateOrientation];
}
/// Applies the effective capture orientation to both the photo and video
/// outputs.
///
/// No-op while recording (orientation changes are ignored mid-recording).
/// A locked capture orientation, when set, takes precedence over the current
/// device orientation.
- (void)updateOrientation {
  if (_isRecording) {
    return;
  }

  UIDeviceOrientation orientation = (_lockedCaptureOrientation != UIDeviceOrientationUnknown)
                                        ? _lockedCaptureOrientation
                                        : _deviceOrientation;

  [self updateOrientation:orientation forCaptureOutput:_capturePhotoOutput];
  [self updateOrientation:orientation forCaptureOutput:_captureVideoOutput];
}
/// Rotates the given output's video connection (if any) to match
/// `orientation`.
- (void)updateOrientation:(UIDeviceOrientation)orientation
         forCaptureOutput:(AVCaptureOutput *)captureOutput {
  // Nothing to rotate if the output has not been created yet.
  if (!captureOutput) {
    return;
  }

  AVCaptureConnection *videoConnection =
      [captureOutput connectionWithMediaType:AVMediaTypeVideo];
  if (!videoConnection || !videoConnection.isVideoOrientationSupported) {
    return;
  }
  videoConnection.videoOrientation =
      [self getVideoOrientationForDeviceOrientation:orientation];
}
/// Captures a still photo to a temporary file and completes `result` with the
/// file path on success or an error on failure. Must run on the capture
/// session queue (asserted below).
- (void)captureToFile:(FLTThreadSafeFlutterResult *)result {
  AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
  if (_resolutionPreset == FLTResolutionPresetMax) {
    [settings setHighResolutionPhotoEnabled:YES];
  }

  // Use HEVC/HEIF only when requested AND the codec is available; otherwise
  // fall back to JPEG.
  NSString *extension;
  BOOL isHEVCCodecAvailable =
      [self.capturePhotoOutput.availablePhotoCodecTypes containsObject:AVVideoCodecTypeHEVC];
  if (_fileFormat == FCPFileFormatHEIF && isHEVCCodecAvailable) {
    settings =
        [AVCapturePhotoSettings photoSettingsWithFormat:@{AVVideoCodecKey : AVVideoCodecTypeHEVC}];
    extension = @"heif";
  } else {
    extension = @"jpg";
  }

  // Torch maps to -1 (no AVCaptureFlashMode equivalent), so skip it.
  AVCaptureFlashMode avFlashMode = FLTGetAVCaptureFlashModeForFLTFlashMode(_flashMode);
  if (avFlashMode != -1) {
    [settings setFlashMode:avFlashMode];
  }
  NSError *error;
  // NOTE(review): `error` is passed by value below, so a failure inside
  // getTemporaryFilePathWithExtension: cannot set this local; that method
  // signals failure only by returning nil, which this `if (error)` check
  // does not cover — confirm and consider an NSError ** parameter.
  NSString *path = [self getTemporaryFilePathWithExtension:extension
                                                 subfolder:@"pictures"
                                                    prefix:@"CAP_"
                                                     error:error];
  if (error) {
    [result sendError:error];
    return;
  }

  __weak typeof(self) weakSelf = self;
  FLTSavePhotoDelegate *savePhotoDelegate = [[FLTSavePhotoDelegate alloc]
       initWithPath:path
            ioQueue:self.photoIOQueue
  completionHandler:^(NSString *_Nullable path, NSError *_Nullable error) {
    typeof(self) strongSelf = weakSelf;
    if (!strongSelf) return;
    dispatch_async(strongSelf.captureSessionQueue, ^{
      // cannot use the outer `strongSelf`; re-derive it on this queue
      typeof(self) strongSelf = weakSelf;
      if (!strongSelf) return;
      // Drop the retained delegate now that the capture has completed.
      [strongSelf.inProgressSavePhotoDelegates removeObjectForKey:@(settings.uniqueID)];
    });

    if (error) {
      [result sendError:error];
    } else {
      NSAssert(path, @"Path must not be nil if no error.");
      [result sendSuccessWithData:path];
    }
  }];

  NSAssert(dispatch_get_specific(FLTCaptureSessionQueueSpecific),
           @"save photo delegate references must be updated on the capture session queue");
  // Retain the delegate (keyed by the settings' unique id) until its
  // completion handler runs; AVFoundation holds it only weakly.
  self.inProgressSavePhotoDelegates[@(settings.uniqueID)] = savePhotoDelegate;
  [self.capturePhotoOutput capturePhotoWithSettings:settings delegate:savePhotoDelegate];
}
/// Maps a UIDeviceOrientation to the corresponding AVCaptureVideoOrientation.
///
/// Note: the landscape cases are flipped — a device rotated landscape-left
/// produces landscape-right video, and vice versa. Unknown/face orientations
/// default to portrait.
- (AVCaptureVideoOrientation)getVideoOrientationForDeviceOrientation:
    (UIDeviceOrientation)deviceOrientation {
  switch (deviceOrientation) {
    case UIDeviceOrientationLandscapeLeft:
      return AVCaptureVideoOrientationLandscapeRight;
    case UIDeviceOrientationLandscapeRight:
      return AVCaptureVideoOrientationLandscapeLeft;
    case UIDeviceOrientationPortraitUpsideDown:
      return AVCaptureVideoOrientationPortraitUpsideDown;
    case UIDeviceOrientationPortrait:
    default:
      return AVCaptureVideoOrientationPortrait;
  }
}
/// Builds a unique path of the form
/// `<Documents>/camera/<subfolder>/<prefix><UUID>.<extension>`, creating the
/// directory if needed.
///
/// NOTE(review): `error` is taken by value, so an NSError produced by
/// -createDirectoryAtPath: is written to the local copy only and never
/// reaches the caller; failure is observable solely as a nil return value.
/// Consider changing the parameter to NSError ** (requires a coordinated
/// caller update).
- (NSString *)getTemporaryFilePathWithExtension:(NSString *)extension
                                      subfolder:(NSString *)subfolder
                                         prefix:(NSString *)prefix
                                          error:(NSError *)error {
  NSString *docDir =
      NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
  NSString *fileDir =
      [[docDir stringByAppendingPathComponent:@"camera"] stringByAppendingPathComponent:subfolder];
  NSString *fileName = [prefix stringByAppendingString:[[NSUUID UUID] UUIDString]];
  NSString *file =
      [[fileDir stringByAppendingPathComponent:fileName] stringByAppendingPathExtension:extension];

  NSFileManager *fm = [NSFileManager defaultManager];
  if (![fm fileExistsAtPath:fileDir]) {
    [[NSFileManager defaultManager] createDirectoryAtPath:fileDir
                              withIntermediateDirectories:true
                                               attributes:nil
                                                    error:&error];
    if (error) {
      return nil;
    }
  }

  return file;
}
/// Applies `resolutionPreset` to the video capture session and records the
/// resulting preview size; mirrors the preset onto the audio session.
///
/// Cases deliberately fall through: when a preset cannot be applied, the next
/// lower resolution is attempted, bottoming out at AVCaptureSessionPresetLow.
/// Returns NO and populates `error` only when no preset at all can be set.
- (BOOL)setCaptureSessionPreset:(FLTResolutionPreset)resolutionPreset withError:(NSError **)error {
  switch (resolutionPreset) {
    case FLTResolutionPresetMax: {
      AVCaptureDeviceFormat *bestFormat =
          [self highestResolutionFormatForCaptureDevice:_captureDevice];
      if (bestFormat) {
        _videoCaptureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
        if ([_captureDevice lockForConfiguration:NULL]) {
          // Set the best device format found and finish the device configuration.
          _captureDevice.activeFormat = bestFormat;
          [_captureDevice unlockForConfiguration];

          // Set the preview size based on values from the current capture device.
          _previewSize =
              CGSizeMake(_captureDevice.activeFormat.highResolutionStillImageDimensions.width,
                         _captureDevice.activeFormat.highResolutionStillImageDimensions.height);
          break;
        }
      }
    }
      // Fall through: no max-resolution format could be applied.
    case FLTResolutionPresetUltraHigh:
      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
        _videoCaptureSession.sessionPreset = AVCaptureSessionPreset3840x2160;
        _previewSize = CGSizeMake(3840, 2160);
        break;
      }
      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
        _videoCaptureSession.sessionPreset = AVCaptureSessionPresetHigh;
        _previewSize =
            CGSizeMake(_captureDevice.activeFormat.highResolutionStillImageDimensions.width,
                       _captureDevice.activeFormat.highResolutionStillImageDimensions.height);
        break;
      }
      // Fall through to the next lower preset.
    case FLTResolutionPresetVeryHigh:
      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
        _videoCaptureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
        _previewSize = CGSizeMake(1920, 1080);
        break;
      }
      // Fall through to the next lower preset.
    case FLTResolutionPresetHigh:
      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
        _videoCaptureSession.sessionPreset = AVCaptureSessionPreset1280x720;
        _previewSize = CGSizeMake(1280, 720);
        break;
      }
      // Fall through to the next lower preset.
    case FLTResolutionPresetMedium:
      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
        _videoCaptureSession.sessionPreset = AVCaptureSessionPreset640x480;
        _previewSize = CGSizeMake(640, 480);
        break;
      }
      // Fall through to the next lower preset.
    case FLTResolutionPresetLow:
      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPreset352x288]) {
        _videoCaptureSession.sessionPreset = AVCaptureSessionPreset352x288;
        _previewSize = CGSizeMake(352, 288);
        break;
      }
      // Fall through: also handles FLTResolutionPresetVeryLow.
    default:
      if ([_videoCaptureSession canSetSessionPreset:AVCaptureSessionPresetLow]) {
        _videoCaptureSession.sessionPreset = AVCaptureSessionPresetLow;
        _previewSize = CGSizeMake(352, 288);
      } else {
        *error = [NSError errorWithDomain:NSCocoaErrorDomain
                                     code:NSURLErrorUnknown
                                 userInfo:@{
                                   NSLocalizedDescriptionKey :
                                       @"No capture session available for current capture session."
                                 }];
        return NO;
      }
  }
  // Keep the audio session's preset in sync with the video session's.
  _audioCaptureSession.sessionPreset = _videoCaptureSession.sessionPreset;
  return YES;
}
/// Finds the highest available resolution in terms of pixel count for the given device.
///
/// Returns nil if the device reports no formats.
- (AVCaptureDeviceFormat *)highestResolutionFormatForCaptureDevice:
    (AVCaptureDevice *)captureDevice {
  AVCaptureDeviceFormat *bestFormat = nil;
  NSUInteger maxPixelCount = 0;
  // Iterate the formats of the device passed in, not the `_captureDevice` ivar;
  // the original code queried the wrong device whenever the two differed
  // (e.g. while evaluating a camera that is not currently selected).
  for (AVCaptureDeviceFormat *format in captureDevice.formats) {
    CMVideoDimensions res = self.videoDimensionsForFormat(format);
    NSUInteger height = res.height;
    NSUInteger width = res.width;
    NSUInteger pixelCount = height * width;
    if (pixelCount > maxPixelCount) {
      maxPixelCount = pixelCount;
      bestFormat = format;
    }
  }
  return bestFormat;
}
/// AVCapture(Video|Audio)DataOutputSampleBufferDelegate callback: receives every
/// video and audio sample buffer from the capture sessions.
///
/// A single callback feeds four consumers: the Flutter preview texture, the
/// Dart image stream, and the video/audio tracks of an in-progress recording.
- (void)captureOutput:(AVCaptureOutput *)output
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  if (output == _captureVideoOutput) {
    // Retain the frame for the preview texture; the matching release happens in
    // copyPixelBuffer (when consumed), below (when superseded), or in dealloc.
    CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CFRetain(newBuffer);

    __block CVPixelBufferRef previousPixelBuffer = nil;
    // Use `dispatch_sync` to avoid unnecessary context switch under common non-contest scenarios;
    // Under rare contest scenarios, it will not block for too long since the critical section is
    // quite lightweight.
    dispatch_sync(self.pixelBufferSynchronizationQueue, ^{
      // No need weak self because it's dispatch_sync.
      previousPixelBuffer = self.latestPixelBuffer;
      self.latestPixelBuffer = newBuffer;
    });
    // Release the superseded frame outside the critical section.
    if (previousPixelBuffer) {
      CFRelease(previousPixelBuffer);
    }
    if (_onFrameAvailable) {
      _onFrameAvailable();
    }
  }
  if (!CMSampleBufferDataIsReady(sampleBuffer)) {
    [_methodChannel invokeMethod:errorMethod
                       arguments:@"sample buffer is not ready. Skipping sample"];
    return;
  }
  if (_isStreamingImages) {
    FlutterEventSink eventSink = _imageStreamHandler.eventSink;
    // Drop frames once the Dart side falls maxStreamingPendingFramesCount
    // frames behind, to bound memory usage.
    if (eventSink && (self.streamingPendingFramesCount < self.maxStreamingPendingFramesCount)) {
      self.streamingPendingFramesCount++;

      CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
      // Must lock base address before accessing the pixel data.
      CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

      size_t imageWidth = CVPixelBufferGetWidth(pixelBuffer);
      size_t imageHeight = CVPixelBufferGetHeight(pixelBuffer);

      NSMutableArray *planes = [NSMutableArray array];

      const Boolean isPlanar = CVPixelBufferIsPlanar(pixelBuffer);
      size_t planeCount;
      if (isPlanar) {
        planeCount = CVPixelBufferGetPlaneCount(pixelBuffer);
      } else {
        planeCount = 1;
      }

      // Copy each plane (or the single interleaved buffer) into a
      // Dart-visible byte array.
      for (int i = 0; i < planeCount; i++) {
        void *planeAddress;
        size_t bytesPerRow;
        size_t height;
        size_t width;

        if (isPlanar) {
          planeAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, i);
          bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, i);
          height = CVPixelBufferGetHeightOfPlane(pixelBuffer, i);
          width = CVPixelBufferGetWidthOfPlane(pixelBuffer, i);
        } else {
          planeAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
          bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
          height = CVPixelBufferGetHeight(pixelBuffer);
          width = CVPixelBufferGetWidth(pixelBuffer);
        }

        NSNumber *length = @(bytesPerRow * height);
        NSData *bytes = [NSData dataWithBytes:planeAddress length:length.unsignedIntegerValue];

        NSMutableDictionary *planeBuffer = [NSMutableDictionary dictionary];
        planeBuffer[@"bytesPerRow"] = @(bytesPerRow);
        planeBuffer[@"width"] = @(width);
        planeBuffer[@"height"] = @(height);
        planeBuffer[@"bytes"] = [FlutterStandardTypedData typedDataWithBytes:bytes];

        [planes addObject:planeBuffer];
      }
      // Done accessing the `pixelBuffer`; unlock the base address that was
      // locked above.
      CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

      NSMutableDictionary *imageBuffer = [NSMutableDictionary dictionary];
      imageBuffer[@"width"] = [NSNumber numberWithUnsignedLong:imageWidth];
      imageBuffer[@"height"] = [NSNumber numberWithUnsignedLong:imageHeight];
      imageBuffer[@"format"] = @(_videoFormat);
      imageBuffer[@"planes"] = planes;
      imageBuffer[@"lensAperture"] = [NSNumber numberWithFloat:[_captureDevice lensAperture]];
      Float64 exposureDuration = CMTimeGetSeconds([_captureDevice exposureDuration]);
      Float64 nsExposureDuration = 1000000000 * exposureDuration;
      // NOTE(review): storing nanoseconds in an int overflows for exposures
      // longer than ~2.1 seconds - confirm whether a 64-bit type is needed.
      imageBuffer[@"sensorExposureTime"] = [NSNumber numberWithInt:nsExposureDuration];
      imageBuffer[@"sensorSensitivity"] = [NSNumber numberWithFloat:[_captureDevice ISO]];

      // Event sinks must be invoked on the main thread.
      dispatch_async(dispatch_get_main_queue(), ^{
        eventSink(imageBuffer);
      });
    }
  }
  if (_isRecording && !_isRecordingPaused) {
    if (_videoWriter.status == AVAssetWriterStatusFailed) {
      [_methodChannel invokeMethod:errorMethod
                         arguments:[NSString stringWithFormat:@"%@", _videoWriter.error]];
      return;
    }

    // ignore audio samples until the first video sample arrives to avoid black frames
    // https://github.com/flutter/flutter/issues/57831
    if (_videoWriter.status != AVAssetWriterStatusWriting && output != _captureVideoOutput) {
      return;
    }

    CMTime currentSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

    // Lazily start the writer on the first usable (video) sample.
    if (_videoWriter.status != AVAssetWriterStatusWriting) {
      [_videoWriter startWriting];
      [_videoWriter startSessionAtSourceTime:currentSampleTime];
    }

    if (output == _captureVideoOutput) {
      if (_videoIsDisconnected) {
        _videoIsDisconnected = NO;

        // Accumulate the pause gap so recorded timestamps stay contiguous.
        if (_videoTimeOffset.value == 0) {
          _videoTimeOffset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
        } else {
          CMTime offset = CMTimeSubtract(currentSampleTime, _lastVideoSampleTime);
          _videoTimeOffset = CMTimeAdd(_videoTimeOffset, offset);
        }

        return;
      }

      _lastVideoSampleTime = currentSampleTime;

      CVPixelBufferRef nextBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
      CMTime nextSampleTime = CMTimeSubtract(_lastVideoSampleTime, _videoTimeOffset);
      [_videoAdaptor appendPixelBuffer:nextBuffer withPresentationTime:nextSampleTime];
    } else {
      CMTime dur = CMSampleBufferGetDuration(sampleBuffer);

      if (dur.value > 0) {
        currentSampleTime = CMTimeAdd(currentSampleTime, dur);
      }

      if (_audioIsDisconnected) {
        _audioIsDisconnected = NO;

        // Accumulate the pause gap for the audio track as well.
        if (_audioTimeOffset.value == 0) {
          _audioTimeOffset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
        } else {
          CMTime offset = CMTimeSubtract(currentSampleTime, _lastAudioSampleTime);
          _audioTimeOffset = CMTimeAdd(_audioTimeOffset, offset);
        }

        return;
      }

      _lastAudioSampleTime = currentSampleTime;

      // Shift audio timestamps back by the accumulated pause offset.
      if (_audioTimeOffset.value != 0) {
        CMSampleBufferRef adjustedSampleBuffer =
            [self copySampleBufferWithAdjustedTime:sampleBuffer by:_audioTimeOffset];
        [self newAudioSample:adjustedSampleBuffer];
        CFRelease(adjustedSampleBuffer);
      } else {
        [self newAudioSample:sampleBuffer];
      }
    }
  }
}
/// Returns a copy of `sample` whose timing info has been shifted earlier by
/// `offset`.
///
/// Used to compensate audio timestamps for the gap introduced while recording
/// was paused. The caller owns the returned buffer and must CFRelease it.
- (CMSampleBufferRef)copySampleBufferWithAdjustedTime:(CMSampleBufferRef)sample by:(CMTime)offset {
  CMItemCount count;
  // First call with a zero-sized array only queries the number of entries.
  CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
  CMSampleTimingInfo *pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
  CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
  for (CMItemCount i = 0; i < count; i++) {
    pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
    pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
  }
  CMSampleBufferRef sout;
  CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
  free(pInfo);
  return sout;
}
/// Appends a video sample buffer to the asset writer input, reporting
/// failures over the method channel.
- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer {
  AVAssetWriterStatus writerStatus = _videoWriter.status;
  if (writerStatus != AVAssetWriterStatusWriting) {
    // A failed writer is reported; any other non-writing state is silently
    // skipped.
    if (writerStatus == AVAssetWriterStatusFailed) {
      [_methodChannel invokeMethod:errorMethod
                         arguments:[NSString stringWithFormat:@"%@", _videoWriter.error]];
    }
    return;
  }
  if (!_videoWriterInput.readyForMoreMediaData) {
    return;
  }
  if (![_videoWriterInput appendSampleBuffer:sampleBuffer]) {
    [_methodChannel
        invokeMethod:errorMethod
           arguments:[NSString stringWithFormat:@"%@", @"Unable to write to video input"]];
  }
}
/// Appends an audio sample buffer to the asset writer input, reporting
/// failures over the method channel.
- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer {
  AVAssetWriterStatus writerStatus = _videoWriter.status;
  if (writerStatus != AVAssetWriterStatusWriting) {
    // A failed writer is reported; any other non-writing state is silently
    // skipped.
    if (writerStatus == AVAssetWriterStatusFailed) {
      [_methodChannel invokeMethod:errorMethod
                         arguments:[NSString stringWithFormat:@"%@", _videoWriter.error]];
    }
    return;
  }
  if (!_audioWriterInput.readyForMoreMediaData) {
    return;
  }
  if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) {
    [_methodChannel
        invokeMethod:errorMethod
           arguments:[NSString stringWithFormat:@"%@", @"Unable to write to audio input"]];
  }
}
/// Stops the camera and detaches every input and output from both capture
/// sessions (video first, then audio, matching the previous teardown order).
- (void)close {
  [self stop];
  NSArray<AVCaptureSession *> *sessions = @[ _videoCaptureSession, _audioCaptureSession ];
  for (AVCaptureSession *session in sessions) {
    for (AVCaptureInput *input in [session inputs]) {
      [session removeInput:input];
    }
    for (AVCaptureOutput *output in [session outputs]) {
      [session removeOutput:output];
    }
  }
}
- (void)dealloc {
  // Release the retained preview frame that copyPixelBuffer never consumed.
  if (_latestPixelBuffer) {
    CFRelease(_latestPixelBuffer);
  }
  [_motionManager stopAccelerometerUpdates];
}
/// FlutterTexture protocol: hands the most recent camera frame to the engine.
///
/// The stored reference is cleared so each frame is returned (and its retain
/// transferred to the caller) at most once; may return nil when no new frame
/// has arrived.
- (CVPixelBufferRef)copyPixelBuffer {
  __block CVPixelBufferRef pixelBuffer = nil;
  // Use `dispatch_sync` because `copyPixelBuffer` API requires synchronous return.
  dispatch_sync(self.pixelBufferSynchronizationQueue, ^{
    // No need weak self because it's dispatch_sync.
    pixelBuffer = self.latestPixelBuffer;
    self.latestPixelBuffer = nil;
  });
  return pixelBuffer;
}
/// Starts video recording without streaming frames to Dart.
- (void)startVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
  [self startVideoRecordingWithResult:result messengerForStreaming:nil];
}
/// Starts video recording, optionally also streaming frames to Dart when a
/// `messenger` is supplied.
- (void)startVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result
                messengerForStreaming:(nullable NSObject<FlutterBinaryMessenger> *)messenger {
  if (!_isRecording) {
    if (messenger != nil) {
      [self startImageStreamWithMessenger:messenger];
    }
    NSError *error;
    // NOTE(review): `error` is passed by value (not `&error`), so the helper
    // cannot populate it and the `if (error)` below reads an uninitialized
    // pointer - confirm the helper's signature and switch to an out-parameter.
    _videoRecordingPath = [self getTemporaryFilePathWithExtension:@"mp4"
                                                        subfolder:@"videos"
                                                           prefix:@"REC_"
                                                            error:error];
    if (error) {
      [result sendError:error];
      return;
    }
    if (![self setupWriterForPath:_videoRecordingPath]) {
      [result sendErrorWithCode:@"IOError" message:@"Setup Writer Failed" details:nil];
      return;
    }
    _isRecording = YES;
    _isRecordingPaused = NO;
    // Reset pause-compensation state for the new recording.
    _videoTimeOffset = CMTimeMake(0, 1);
    _audioTimeOffset = CMTimeMake(0, 1);
    _videoIsDisconnected = NO;
    _audioIsDisconnected = NO;
    [result sendSuccess];
  } else {
    [result sendErrorWithCode:@"Error" message:@"Video is already recording" details:nil];
  }
}
/// Stops the active recording, finalizes the file, and completes the result
/// with the recorded file path.
- (void)stopVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
  if (_isRecording) {
    _isRecording = NO;
    // Only finalize a writer that actually started.
    // NOTE(review): when the writer status is still Unknown no result is ever
    // sent for this call - confirm whether that is intentional.
    if (_videoWriter.status != AVAssetWriterStatusUnknown) {
      [_videoWriter finishWritingWithCompletionHandler:^{
        if (self->_videoWriter.status == AVAssetWriterStatusCompleted) {
          [self updateOrientation];
          [result sendSuccessWithData:self->_videoRecordingPath];
          self->_videoRecordingPath = nil;
        } else {
          [result sendErrorWithCode:@"IOError"
                            message:@"AVAssetWriter could not finish writing!"
                            details:nil];
        }
      }];
    }
  } else {
    NSError *error =
        [NSError errorWithDomain:NSCocoaErrorDomain
                            code:NSURLErrorResourceUnavailable
                        userInfo:@{NSLocalizedDescriptionKey : @"Video is not recording!"}];
    [result sendError:error];
  }
}
/// Pauses an active recording.
///
/// Marks both tracks disconnected so that, on the next sample after resume,
/// captureOutput can measure the pause gap and shift later timestamps by it.
- (void)pauseVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
  _isRecordingPaused = YES;
  _videoIsDisconnected = YES;
  _audioIsDisconnected = YES;
  [result sendSuccess];
}

/// Resumes a previously paused recording.
- (void)resumeVideoRecordingWithResult:(FLTThreadSafeFlutterResult *)result {
  _isRecordingPaused = NO;
  [result sendSuccess];
}
/// Handles the `lockCaptureOrientation` method call: future captures keep the
/// requested orientation regardless of device rotation until unlocked.
- (void)lockCaptureOrientationWithResult:(FLTThreadSafeFlutterResult *)result
                             orientation:(NSString *)orientationStr {
  UIDeviceOrientation orientation = FLTGetUIDeviceOrientationForString(orientationStr);
  // "Unknown" should never be sent, so is used to represent an unexpected
  // value.
  if (orientation == UIDeviceOrientationUnknown) {
    [result sendError:[NSError errorWithDomain:NSCocoaErrorDomain
                                          code:NSURLErrorUnknown
                                      userInfo:@{
                                        NSLocalizedDescriptionKey : [NSString
                                            stringWithFormat:@"Unknown device orientation %@",
                                                             orientationStr]
                                      }]];
    return;
  }
  // Only reconfigure connections when the lock actually changes.
  if (_lockedCaptureOrientation != orientation) {
    _lockedCaptureOrientation = orientation;
    [self updateOrientation];
  }
  [result sendSuccess];
}

/// Handles the `unlockCaptureOrientation` method call, returning to
/// device-driven orientation updates (Unknown acts as the "no lock" sentinel).
- (void)unlockCaptureOrientationWithResult:(FLTThreadSafeFlutterResult *)result {
  _lockedCaptureOrientation = UIDeviceOrientationUnknown;
  [self updateOrientation];
  [result sendSuccess];
}
/// Handles the `setFlashMode` method call.
///
/// Torch mode is applied immediately to the device; any other flash mode is
/// validated against the photo output and stored in `_flashMode` for use at
/// capture time (torch is switched off in that case).
- (void)setFlashModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr {
  FLTFlashMode mode = FLTGetFLTFlashModeForString(modeStr);
  if (mode == FLTFlashModeInvalid) {
    [result sendError:[NSError errorWithDomain:NSCocoaErrorDomain
                                          code:NSURLErrorUnknown
                                      userInfo:@{
                                        NSLocalizedDescriptionKey : [NSString
                                            stringWithFormat:@"Unknown flash mode %@", modeStr]
                                      }]];
    return;
  }
  if (mode == FLTFlashModeTorch) {
    if (!_captureDevice.hasTorch) {
      [result sendErrorWithCode:@"setFlashModeFailed"
                        message:@"Device does not support torch mode"
                        details:nil];
      return;
    }
    if (!_captureDevice.isTorchAvailable) {
      [result sendErrorWithCode:@"setFlashModeFailed"
                        message:@"Torch mode is currently not available"
                        details:nil];
      return;
    }
    if (_captureDevice.torchMode != AVCaptureTorchModeOn) {
      // Note: errors from lockForConfiguration are ignored here (nil out-param).
      [_captureDevice lockForConfiguration:nil];
      [_captureDevice setTorchMode:AVCaptureTorchModeOn];
      [_captureDevice unlockForConfiguration];
    }
  } else {
    if (!_captureDevice.hasFlash) {
      [result sendErrorWithCode:@"setFlashModeFailed"
                        message:@"Device does not have flash capabilities"
                        details:nil];
      return;
    }
    AVCaptureFlashMode avFlashMode = FLTGetAVCaptureFlashModeForFLTFlashMode(mode);
    if (![_capturePhotoOutput.supportedFlashModes
            containsObject:[NSNumber numberWithInt:((int)avFlashMode)]]) {
      [result sendErrorWithCode:@"setFlashModeFailed"
                        message:@"Device does not support this specific flash mode"
                        details:nil];
      return;
    }
    // Leaving torch mode: make sure the torch is off.
    if (_captureDevice.torchMode != AVCaptureTorchModeOff) {
      [_captureDevice lockForConfiguration:nil];
      [_captureDevice setTorchMode:AVCaptureTorchModeOff];
      [_captureDevice unlockForConfiguration];
    }
  }
  _flashMode = mode;
  [result sendSuccess];
}
/// Handles the `setExposureMode` method call by parsing `modeStr`, storing the
/// mode, and applying it to the capture device.
- (void)setExposureModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr {
  FLTExposureMode mode = FLTGetFLTExposureModeForString(modeStr);
  if (mode != FLTExposureModeInvalid) {
    _exposureMode = mode;
    [self applyExposureMode];
    [result sendSuccess];
    return;
  }
  NSString *description = [NSString stringWithFormat:@"Unknown exposure mode %@", modeStr];
  [result sendError:[NSError errorWithDomain:NSCocoaErrorDomain
                                        code:NSURLErrorUnknown
                                    userInfo:@{NSLocalizedDescriptionKey : description}]];
}
/// Applies the currently selected `_exposureMode` to the capture device.
- (void)applyExposureMode {
  // Note: errors from lockForConfiguration are ignored here (nil out-param).
  [_captureDevice lockForConfiguration:nil];
  switch (_exposureMode) {
    case FLTExposureModeLocked:
      // Uses a one-shot auto-expose rather than AVCaptureExposureModeLocked -
      // presumably to meter once and then hold; confirm this is intentional.
      [_captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
      break;
    case FLTExposureModeAuto:
      // Prefer continuous auto-exposure, falling back to one-shot.
      if ([_captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
        [_captureDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
      } else {
        [_captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
      }
      break;
    case FLTExposureModeInvalid:
      // This state is not intended to be reachable; it exists only for error handling during
      // message deserialization.
      NSAssert(false, @"");
      break;
  }
  [_captureDevice unlockForConfiguration];
}
/// Handles the `setFocusMode` method call by parsing `modeStr`, storing the
/// mode, and applying it to the capture device.
- (void)setFocusModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr {
  FLTFocusMode mode = FLTGetFLTFocusModeForString(modeStr);
  if (mode != FLTFocusModeInvalid) {
    _focusMode = mode;
    [self applyFocusMode];
    [result sendSuccess];
    return;
  }
  NSString *description = [NSString stringWithFormat:@"Unknown focus mode %@", modeStr];
  [result sendError:[NSError errorWithDomain:NSCocoaErrorDomain
                                        code:NSURLErrorUnknown
                                    userInfo:@{NSLocalizedDescriptionKey : description}]];
}
/// Applies the currently selected `_focusMode` to the current capture device.
- (void)applyFocusMode {
  [self applyFocusMode:_focusMode onDevice:_captureDevice];
}

/// Applies `focusMode` to `captureDevice`, silently skipping modes the device
/// does not support.
- (void)applyFocusMode:(FLTFocusMode)focusMode onDevice:(AVCaptureDevice *)captureDevice {
  // Note: errors from lockForConfiguration are ignored here (nil out-param).
  [captureDevice lockForConfiguration:nil];
  switch (focusMode) {
    case FLTFocusModeLocked:
      // One-shot auto-focus; focus stays where that single pass leaves it.
      if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
        [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
      }
      break;
    case FLTFocusModeAuto:
      // Prefer continuous auto-focus, falling back to one-shot.
      if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        [captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
      } else if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
        [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
      }
      break;
    case FLTFocusModeInvalid:
      // This state is not intended to be reachable; it exists only for error handling during
      // message deserialization.
      NSAssert(false, @"");
      break;
  }
  [captureDevice unlockForConfiguration];
}
/// Pauses the preview texture (frames keep arriving but are not displayed).
- (void)pausePreviewWithResult:(FLTThreadSafeFlutterResult *)result {
  _isPreviewPaused = true;
  [result sendSuccess];
}

/// Resumes a previously paused preview.
- (void)resumePreviewWithResult:(FLTThreadSafeFlutterResult *)result {
  _isPreviewPaused = false;
  [result sendSuccess];
}
/// Switches the active capture device to `cameraName` while a recording is in
/// progress, rebuilding the video input/output connections in place.
///
/// Exactly one result is sent per call; every failure path commits the pending
/// session configuration before returning (the original fell through after
/// sending an error, mutating the session further and sending a second result).
- (void)setDescriptionWhileRecording:(NSString *)cameraName
                              result:(FLTThreadSafeFlutterResult *)result {
  if (!_isRecording) {
    [result sendErrorWithCode:@"setDescriptionWhileRecordingFailed"
                      message:@"Device was not recording"
                      details:nil];
    return;
  }

  _captureDevice = self.captureDeviceFactory();

  AVCaptureConnection *oldConnection =
      [_captureVideoOutput connectionWithMediaType:AVMediaTypeVideo];

  // Stop video capture from the old output.
  [_captureVideoOutput setSampleBufferDelegate:nil queue:nil];

  // Remove the old video capture connections.
  [_videoCaptureSession beginConfiguration];
  [_videoCaptureSession removeInput:_captureVideoInput];
  [_videoCaptureSession removeOutput:_captureVideoOutput];

  NSError *error = nil;
  AVCaptureConnection *newConnection = [self createConnection:&error];
  if (error) {
    // Balance beginConfiguration before bailing out.
    [_videoCaptureSession commitConfiguration];
    [result sendError:error];
    return;
  }

  // Keep the same orientation the old connections had.
  if (oldConnection && newConnection.isVideoOrientationSupported) {
    newConnection.videoOrientation = oldConnection.videoOrientation;
  }

  // Add the new connections to the session.
  if (![_videoCaptureSession canAddInput:_captureVideoInput]) {
    [_videoCaptureSession commitConfiguration];
    [result sendErrorWithCode:@"VideoError" message:@"Unable switch video input" details:nil];
    return;
  }
  [_videoCaptureSession addInputWithNoConnections:_captureVideoInput];
  if (![_videoCaptureSession canAddOutput:_captureVideoOutput]) {
    [_videoCaptureSession commitConfiguration];
    [result sendErrorWithCode:@"VideoError" message:@"Unable switch video output" details:nil];
    return;
  }
  [_videoCaptureSession addOutputWithNoConnections:_captureVideoOutput];
  if (![_videoCaptureSession canAddConnection:newConnection]) {
    [_videoCaptureSession commitConfiguration];
    [result sendErrorWithCode:@"VideoError" message:@"Unable switch video connection" details:nil];
    return;
  }
  [_videoCaptureSession addConnection:newConnection];
  [_videoCaptureSession commitConfiguration];

  [result sendSuccess];
}
/// Maps normalized (x, y) touch coordinates into the point-of-interest space
/// expected by AVFoundation for the given device orientation.
///
/// Landscape-left needs no rotation; the other orientations are rotated to
/// compensate (comments give the rotation relative to landscape-left).
- (CGPoint)getCGPointForCoordsWithOrientation:(UIDeviceOrientation)orientation
                                            x:(double)x
                                            y:(double)y {
  switch (orientation) {
    case UIDeviceOrientationPortrait:  // 90 ccw
      return CGPointMake(y, 1 - x);
    case UIDeviceOrientationPortraitUpsideDown:  // 90 cw
      return CGPointMake(1 - y, x);
    case UIDeviceOrientationLandscapeRight:  // 180
      return CGPointMake(1 - x, 1 - y);
    case UIDeviceOrientationLandscapeLeft:
    default:
      // No rotation required.
      return CGPointMake(x, y);
  }
}
/// Handles the `setExposurePoint` method call with normalized (x, y)
/// coordinates, translating them for the current device orientation.
- (void)setExposurePointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y {
  if (!_captureDevice.isExposurePointOfInterestSupported) {
    [result sendErrorWithCode:@"setExposurePointFailed"
                      message:@"Device does not have exposure point capabilities"
                      details:nil];
    return;
  }
  UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
  // Note: errors from lockForConfiguration are ignored here (nil out-param).
  [_captureDevice lockForConfiguration:nil];
  [_captureDevice setExposurePointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
                                                                                    x:x
                                                                                    y:y]];
  [_captureDevice unlockForConfiguration];
  // Retrigger auto exposure
  [self applyExposureMode];
  [result sendSuccess];
}
/// Handles the `setFocusPoint` method call with normalized (x, y) coordinates,
/// translating them for the current device orientation.
- (void)setFocusPointWithResult:(FLTThreadSafeFlutterResult *)result x:(double)x y:(double)y {
  if (!_captureDevice.isFocusPointOfInterestSupported) {
    [result sendErrorWithCode:@"setFocusPointFailed"
                      message:@"Device does not have focus point capabilities"
                      details:nil];
    return;
  }
  UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
  // Note: errors from lockForConfiguration are ignored here (nil out-param).
  [_captureDevice lockForConfiguration:nil];
  [_captureDevice setFocusPointOfInterest:[self getCGPointForCoordsWithOrientation:orientation
                                                                                 x:x
                                                                                 y:y]];
  [_captureDevice unlockForConfiguration];
  // Retrigger auto focus
  [self applyFocusMode];
  [result sendSuccess];
}
/// Handles the `setExposureOffset` method call, applying an exposure target
/// bias (in EV) to the device.
///
/// The result echoes the requested offset; the value actually applied by the
/// device is not read back.
- (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset:(double)offset {
  // Note: errors from lockForConfiguration are ignored here (nil out-param).
  [_captureDevice lockForConfiguration:nil];
  [_captureDevice setExposureTargetBias:offset completionHandler:nil];
  [_captureDevice unlockForConfiguration];
  [result sendSuccessWithData:@(offset)];
}
/// Starts streaming camera frames to Dart using a default image stream handler.
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger {
  [self startImageStreamWithMessenger:messenger
                   imageStreamHandler:[[FLTImageStreamHandler alloc]
                                          initWithCaptureSessionQueue:_captureSessionQueue]];
}

/// Starts streaming camera frames to Dart over the `imageStream` event channel
/// using the provided handler.
///
/// Streaming state is flipped on the capture session queue once the stream
/// handler is installed; reports an error if a stream is already running.
- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
                   imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler {
  if (!_isStreamingImages) {
    FlutterEventChannel *eventChannel = [FlutterEventChannel
        eventChannelWithName:@"plugins.flutter.io/camera_avfoundation/imageStream"
             binaryMessenger:messenger];
    FLTThreadSafeEventChannel *threadSafeEventChannel =
        [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel];

    _imageStreamHandler = imageStreamHandler;
    __weak typeof(self) weakSelf = self;
    [threadSafeEventChannel setStreamHandler:_imageStreamHandler
                                  completion:^{
                                    typeof(self) strongSelf = weakSelf;
                                    if (!strongSelf) return;

                                    dispatch_async(strongSelf.captureSessionQueue, ^{
                                      // Cannot use the outer strongSelf; re-resolve
                                      // the weak reference inside the async block.
                                      typeof(self) strongSelf = weakSelf;
                                      if (!strongSelf) return;

                                      strongSelf.isStreamingImages = YES;
                                      strongSelf.streamingPendingFramesCount = 0;
                                    });
                                  }];
  } else {
    [_methodChannel invokeMethod:errorMethod
                       arguments:@"Images from camera are already streaming!"];
  }
}
/// Stops a running image stream, or reports an error if none is active.
- (void)stopImageStream {
  if (!_isStreamingImages) {
    [_methodChannel invokeMethod:errorMethod arguments:@"Images from camera are not streaming!"];
    return;
  }
  _isStreamingImages = NO;
  _imageStreamHandler = nil;
}

/// Called when the Dart side has consumed a streamed frame; frees one slot in
/// the pending-frames budget used for backpressure.
- (void)receivedImageStreamData {
  self.streamingPendingFramesCount--;
}
/// Handles the `getMaxZoomLevel` method call.
- (void)getMaxZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result {
  CGFloat maxZoomFactor = [self getMaxAvailableZoomFactor];

  [result sendSuccessWithData:[NSNumber numberWithFloat:maxZoomFactor]];
}

/// Handles the `getMinZoomLevel` method call.
- (void)getMinZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result {
  CGFloat minZoomFactor = [self getMinAvailableZoomFactor];
  [result sendSuccessWithData:[NSNumber numberWithFloat:minZoomFactor]];
}
/// Handles the `setZoomLevel` method call; values outside the device's
/// supported range are rejected with a ZOOM_ERROR rather than clamped.
- (void)setZoomLevel:(CGFloat)zoom Result:(FLTThreadSafeFlutterResult *)result {
  CGFloat upperBound = [self getMaxAvailableZoomFactor];
  CGFloat lowerBound = [self getMinAvailableZoomFactor];

  if (zoom > upperBound || zoom < lowerBound) {
    NSString *errorMessage = [NSString
        stringWithFormat:@"Zoom level out of bounds (zoom level should be between %f and %f).",
                         lowerBound, upperBound];
    [result sendErrorWithCode:@"ZOOM_ERROR" message:errorMessage details:nil];
    return;
  }

  NSError *error = nil;
  if (![_captureDevice lockForConfiguration:&error]) {
    [result sendError:error];
    return;
  }
  _captureDevice.videoZoomFactor = zoom;
  [_captureDevice unlockForConfiguration];

  [result sendSuccess];
}
/// The minimum zoom factor currently available on the capture device.
- (CGFloat)getMinAvailableZoomFactor {
  return _captureDevice.minAvailableVideoZoomFactor;
}

/// The maximum zoom factor currently available on the capture device.
- (CGFloat)getMaxAvailableZoomFactor {
  return _captureDevice.maxAvailableVideoZoomFactor;
}
/// Creates and configures the AVAssetWriter (plus video and optional audio
/// inputs) used to record an MP4 to `path`.
///
/// Returns NO when the path is nil or the writer cannot be created; writer
/// creation errors are reported over the method channel.
- (BOOL)setupWriterForPath:(NSString *)path {
  NSError *error = nil;
  NSURL *outputURL;
  if (path != nil) {
    outputURL = [NSURL fileURLWithPath:path];
  } else {
    return NO;
  }
  // Lazily attach the audio capture session the first time recording needs it.
  if (_enableAudio && !_isAudioSetup) {
    [self setUpCaptureSessionForAudio];
  }

  _videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL
                                           fileType:AVFileTypeMPEG4
                                              error:&error];
  NSParameterAssert(_videoWriter);
  if (error) {
    [_methodChannel invokeMethod:errorMethod arguments:error.description];
    return NO;
  }

  NSDictionary *videoSettings = [_captureVideoOutput
      recommendedVideoSettingsForAssetWriterWithOutputFileType:AVFileTypeMPEG4];
  _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                         outputSettings:videoSettings];

  // Pixel buffers are appended through this adaptor (see captureOutput) so the
  // source pixel format must match the session's `_videoFormat`.
  _videoAdaptor = [AVAssetWriterInputPixelBufferAdaptor
      assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput
                                 sourcePixelBufferAttributes:@{
                                   (NSString *)kCVPixelBufferPixelFormatTypeKey : @(_videoFormat)
                                 }];

  NSParameterAssert(_videoWriterInput);

  _videoWriterInput.expectsMediaDataInRealTime = YES;

  // Add the audio input.
  if (_enableAudio) {
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *audioOutputSettings = nil;
    // Both type of audio inputs causes output video file to be corrupted.
    audioOutputSettings = @{
      AVFormatIDKey : [NSNumber numberWithInt:kAudioFormatMPEG4AAC],
      AVSampleRateKey : [NSNumber numberWithFloat:44100.0],
      AVNumberOfChannelsKey : [NSNumber numberWithInt:1],
      AVChannelLayoutKey : [NSData dataWithBytes:&acl length:sizeof(acl)],
    };
    _audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                           outputSettings:audioOutputSettings];
    _audioWriterInput.expectsMediaDataInRealTime = YES;

    [_videoWriter addInput:_audioWriterInput];
    [_audioOutput setSampleBufferDelegate:self queue:_captureSessionQueue];
  }

  // Re-apply torch mode if it was selected before setting up the writer.
  if (_flashMode == FLTFlashModeTorch) {
    [self.captureDevice lockForConfiguration:nil];
    [self.captureDevice setTorchMode:AVCaptureTorchModeOn];
    [self.captureDevice unlockForConfiguration];
  }

  [_videoWriter addInput:_videoWriterInput];

  // Route video sample buffers back into captureOutput for recording.
  [_captureVideoOutput setSampleBufferDelegate:self queue:_captureSessionQueue];

  return YES;
}
/// Attaches the default microphone input and an audio data output to the audio
/// capture session.
///
/// Safe to call repeatedly; subsequent calls are no-ops once setup succeeded.
/// Failures are reported over the method channel. (The original reported an
/// error only when the output could not be added; a rejected input failed
/// silently - both failure paths are now reported.)
- (void)setUpCaptureSessionForAudio {
  // Don't setup audio twice or we will lose the audio.
  if (_isAudioSetup) {
    return;
  }

  NSError *error = nil;
  // Create a device input with the device and add it to the session.
  // Setup the audio input.
  AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
  AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice
                                                                           error:&error];
  if (error) {
    [_methodChannel invokeMethod:errorMethod arguments:error.description];
  }
  // Setup the audio output.
  _audioOutput = [[AVCaptureAudioDataOutput alloc] init];

  if ([_audioCaptureSession canAddInput:audioInput]) {
    [_audioCaptureSession addInput:audioInput];

    if ([_audioCaptureSession canAddOutput:_audioOutput]) {
      [_audioCaptureSession addOutput:_audioOutput];
      _isAudioSetup = YES;
      return;
    }
  }
  // Either the input or the output could not be added.
  [_methodChannel invokeMethod:errorMethod
                     arguments:@"Unable to add Audio input/output to session capture"];
  _isAudioSetup = NO;
}
@end
| packages/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m/0 | {
"file_path": "packages/packages/camera/camera_avfoundation/ios/Classes/FLTCam.m",
"repo_id": "packages",
"token_count": 20132
} | 984 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>NSPrivacyTrackingDomains</key>
<array/>
<key>NSPrivacyAccessedAPITypes</key>
<array/>
<key>NSPrivacyCollectedDataTypes</key>
<array/>
<key>NSPrivacyTracking</key>
<false/>
</dict>
</plist>
| packages/packages/camera/camera_avfoundation/ios/Resources/PrivacyInfo.xcprivacy/0 | {
"file_path": "packages/packages/camera/camera_avfoundation/ios/Resources/PrivacyInfo.xcprivacy",
"repo_id": "packages",
"token_count": 169
} | 985 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:flutter/foundation.dart';
import 'camera_image_data.dart';
/// Options wrapper for [CameraPlatform.startVideoCapturing] parameters.
@immutable
class VideoCaptureOptions {
  /// Constructs a new instance.
  const VideoCaptureOptions(
    this.cameraId, {
    this.maxDuration,
    this.streamCallback,
    this.streamOptions,
  }) : assert(
          streamOptions == null || streamCallback != null,
          'Must specify streamCallback if providing streamOptions.',
        );

  /// The ID of the camera to use for capturing.
  final int cameraId;

  /// The maximum time to perform capturing for.
  ///
  /// Unlimited when null.
  final Duration? maxDuration;

  /// An optional callback that, when set, receives every image captured by
  /// the camera while recording.
  final void Function(CameraImageData image)? streamCallback;

  /// Configuration options for streaming; only meaningful together with a
  /// [streamCallback].
  final CameraImageStreamOptions? streamOptions;

  @override
  bool operator ==(Object other) {
    if (identical(this, other)) {
      return true;
    }
    return other is VideoCaptureOptions &&
        runtimeType == other.runtimeType &&
        cameraId == other.cameraId &&
        maxDuration == other.maxDuration &&
        streamCallback == other.streamCallback &&
        streamOptions == other.streamOptions;
  }

  @override
  int get hashCode =>
      Object.hash(cameraId, maxDuration, streamCallback, streamOptions);
}
| packages/packages/camera/camera_platform_interface/lib/src/types/video_capture_options.dart/0 | {
"file_path": "packages/packages/camera/camera_platform_interface/lib/src/types/video_capture_options.dart",
"repo_id": "packages",
"token_count": 513
} | 986 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:camera_platform_interface/camera_platform_interface.dart';
import 'package:flutter_test/flutter_test.dart';
void main() {
  test('ResolutionPreset should contain 6 options', () {
    expect(ResolutionPreset.values.length, 6);
  });

  test('ResolutionPreset enum should have items in correct index', () {
    expect(ResolutionPreset.values, <ResolutionPreset>[
      ResolutionPreset.low,
      ResolutionPreset.medium,
      ResolutionPreset.high,
      ResolutionPreset.veryHigh,
      ResolutionPreset.ultraHigh,
      ResolutionPreset.max,
    ]);
  });
}
| packages/packages/camera/camera_platform_interface/test/types/resolution_preset_test.dart/0 | {
"file_path": "packages/packages/camera/camera_platform_interface/test/types/resolution_preset_test.dart",
"repo_id": "packages",
"token_count": 278
} | 987 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// ignore_for_file: avoid_implementing_value_types
import 'dart:async';
import 'dart:html';
import 'dart:ui';
import 'package:camera_web/src/camera.dart';
import 'package:camera_web/src/camera_service.dart';
import 'package:camera_web/src/shims/dart_js_util.dart';
import 'package:camera_web/src/types/types.dart';
import 'package:cross_file/cross_file.dart';
import 'package:mocktail/mocktail.dart';
// Mocktail mocks for the `dart:html`, `camera_web`, and `cross_file` types
// exercised by the plugin tests. Behavior is stubbed per-test with `when(...)`.
class MockWindow extends Mock implements Window {}
class MockScreen extends Mock implements Screen {}
class MockScreenOrientation extends Mock implements ScreenOrientation {}
class MockDocument extends Mock implements Document {}
class MockElement extends Mock implements Element {}
class MockNavigator extends Mock implements Navigator {}
class MockMediaDevices extends Mock implements MediaDevices {}
class MockCameraService extends Mock implements CameraService {}
class MockMediaStreamTrack extends Mock implements MediaStreamTrack {}
class MockCamera extends Mock implements Camera {}
class MockCameraOptions extends Mock implements CameraOptions {}
class MockVideoElement extends Mock implements VideoElement {}
class MockXFile extends Mock implements XFile {}
class MockJsUtil extends Mock implements JsUtil {}
class MockMediaRecorder extends Mock implements MediaRecorder {}
/// A fake [MediaStream] that returns the provided [_videoTracks].
class FakeMediaStream extends Fake implements MediaStream {
  FakeMediaStream(this._videoTracks);
  // Canned tracks returned verbatim from [getVideoTracks].
  final List<MediaStreamTrack> _videoTracks;
  @override
  List<MediaStreamTrack> getVideoTracks() => _videoTracks;
}
/// A fake [MediaDeviceInfo] that returns the provided [_deviceId], [_label] and [_kind].
class FakeMediaDeviceInfo extends Fake implements MediaDeviceInfo {
  FakeMediaDeviceInfo(this._deviceId, this._label, this._kind);
  // Canned values echoed by the corresponding getters below.
  final String _deviceId;
  final String _label;
  final String _kind;
  @override
  String? get deviceId => _deviceId;
  @override
  String? get label => _label;
  @override
  String? get kind => _kind;
}
/// A fake [MediaError] that reports a fixed error code and message.
class FakeMediaError extends Fake implements MediaError {
  FakeMediaError(int code, [String message = ''])
      : _errorCode = code,
        _errorMessage = message;

  final int _errorCode;
  final String _errorMessage;

  @override
  int get code => _errorCode;

  @override
  String? get message => _errorMessage;
}
/// A fake [DomException] that reports a fixed name and message.
class FakeDomException extends Fake implements DomException {
  FakeDomException(String name, [String? message])
      : _exceptionName = name,
        _exceptionMessage = message;

  final String _exceptionName;
  final String? _exceptionMessage;

  @override
  String get name => _exceptionName;

  @override
  String? get message => _exceptionMessage;
}
/// A fake [ElementStream] that forwards [listen] calls to an inner stream.
class FakeElementStream<T extends Event> extends Fake
    implements ElementStream<T> {
  FakeElementStream(Stream<T> stream) : _inner = stream;

  final Stream<T> _inner;

  @override
  StreamSubscription<T> listen(void Function(T event)? onData,
      {Function? onError, void Function()? onDone, bool? cancelOnError}) {
    final StreamSubscription<T> subscription = _inner.listen(
      onData,
      onError: onError,
      onDone: onDone,
      cancelOnError: cancelOnError,
    );
    return subscription;
  }
}
/// A fake [BlobEvent] carrying a fixed blob payload.
class FakeBlobEvent extends Fake implements BlobEvent {
  FakeBlobEvent(Blob? blob) : _payload = blob;

  final Blob? _payload;

  @override
  Blob? get data => _payload;
}
/// A fake [ErrorEvent] that returns the provided event [_type] and [_message].
///
/// (The class fakes [ErrorEvent], not [DomException]; the previous comment was
/// a copy-paste from [FakeDomException].)
class FakeErrorEvent extends Fake implements ErrorEvent {
  FakeErrorEvent(
    String type, [
    String? message,
  ])  : _type = type,
        _message = message;

  final String _type;
  final String? _message;

  @override
  String get type => _type;

  @override
  String? get message => _message;
}
/// Returns a video element whose source is a blank stream of size [videoSize].
///
/// Can be used to mock a video stream:
/// ```dart
/// final videoElement = getVideoElementWithBlankStream(Size(100, 100));
/// final videoStream = videoElement.captureStream();
/// ```
VideoElement getVideoElementWithBlankStream(Size videoSize) {
  final int width = videoSize.width.toInt();
  final int height = videoSize.height.toInt();
  // Draw a filled rectangle so the canvas produces actual frame content.
  final CanvasElement canvas = CanvasElement(width: width, height: height);
  canvas.context2D.fillRect(0, 0, videoSize.width, videoSize.height);
  return VideoElement()..srcObject = canvas.captureStream();
}
| packages/packages/camera/camera_web/example/integration_test/helpers/mocks.dart/0 | {
"file_path": "packages/packages/camera/camera_web/example/integration_test/helpers/mocks.dart",
"repo_id": "packages",
"token_count": 1385
} | 988 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:convert';
import 'dart:typed_data';
/// The interface for a CrossFile.
///
/// A CrossFile is a container that wraps the path of a selected
/// file by the user and (in some platforms, like web) the bytes
/// with the contents of the file.
///
/// This class is a very limited subset of dart:io [File], so all
/// the methods should seem familiar.
abstract class XFileBase {
  /// Construct a CrossFile
  // ignore: avoid_unused_constructor_parameters
  XFileBase(String? path);

  /// Save the CrossFile at the indicated file path.
  Future<void> saveTo(String path) =>
      throw UnimplementedError('saveTo has not been implemented.');

  /// Get the path of the picked file.
  ///
  /// This should only be used as a backwards-compatibility clutch
  /// for mobile apps, or cosmetic reasons only (to show the user
  /// the path they've picked).
  ///
  /// Accessing the data contained in the picked file by its path
  /// is platform-dependant (and won't work on web), so use the
  /// byte getters in the CrossFile instance instead.
  String get path =>
      throw UnimplementedError('.path has not been implemented.');

  /// The name of the file as it was selected by the user in their device.
  ///
  /// For non-web implementation, this represents the last part of the
  /// filesystem path.
  ///
  /// Use only for cosmetic reasons, do not try to use this as a path.
  String get name =>
      throw UnimplementedError('.name has not been implemented.');

  /// For web, it may be necessary for a file to know its MIME type.
  String? get mimeType =>
      throw UnimplementedError('.mimeType has not been implemented.');

  /// Get the length of the file. Returns a `Future<int>` that completes with
  /// the length in bytes.
  Future<int> length() =>
      throw UnimplementedError('.length() has not been implemented.');

  /// Asynchronously read the entire file contents as a string using the given
  /// [Encoding].
  ///
  /// By default, `encoding` is [utf8].
  ///
  /// Throws Exception if the operation fails.
  Future<String> readAsString({Encoding encoding = utf8}) =>
      throw UnimplementedError('readAsString() has not been implemented.');

  /// Asynchronously read the entire file contents as a list of bytes.
  ///
  /// Throws Exception if the operation fails.
  Future<Uint8List> readAsBytes() =>
      throw UnimplementedError('readAsBytes() has not been implemented.');

  /// Create a new independent [Stream] for the contents of this file.
  ///
  /// If `start` is present, the file will be read from byte-offset `start`.
  /// Otherwise from the beginning (index 0).
  ///
  /// If `end` is present, only up to byte-index `end` will be read. Otherwise,
  /// until end of file.
  ///
  /// In order to make sure that system resources are freed, the stream must be
  /// read to completion or the subscription on the stream must be cancelled.
  Stream<Uint8List> openRead([int? start, int? end]) =>
      throw UnimplementedError('openRead() has not been implemented.');

  /// Get the last-modified time for the CrossFile
  Future<DateTime> lastModified() =>
      throw UnimplementedError('lastModified() has not been implemented.');
}
| packages/packages/cross_file/lib/src/types/base.dart/0 | {
"file_path": "packages/packages/cross_file/lib/src/types/base.dart",
"repo_id": "packages",
"token_count": 942
} | 989 |
name: css_colors
description: Defines constant dart:ui Color objects for CSS colors (for use in Flutter code).
repository: https://github.com/flutter/packages/tree/main/packages/css_colors
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+css_colors%22
version: 1.1.4
environment:
sdk: ^3.1.0
flutter: ">=3.13.0"
dependencies:
flutter:
sdk: flutter
flutter_test:
sdk: flutter
topics:
- color
- css
- ui
| packages/packages/css_colors/pubspec.yaml/0 | {
"file_path": "packages/packages/css_colors/pubspec.yaml",
"repo_id": "packages",
"token_count": 198
} | 990 |
#include "Generated.xcconfig"
| packages/packages/dynamic_layouts/example/ios/Flutter/Debug.xcconfig/0 | {
"file_path": "packages/packages/dynamic_layouts/example/ios/Flutter/Debug.xcconfig",
"repo_id": "packages",
"token_count": 12
} | 991 |
#include "ephemeral/Flutter-Generated.xcconfig"
| packages/packages/dynamic_layouts/example/macos/Flutter/Flutter-Release.xcconfig/0 | {
"file_path": "packages/packages/dynamic_layouts/example/macos/Flutter/Flutter-Release.xcconfig",
"repo_id": "packages",
"token_count": 19
} | 992 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package androidx.test.espresso.flutter.action;
import android.view.View;
import androidx.test.annotation.ExperimentalTestApi;
import androidx.test.espresso.UiController;
import androidx.test.espresso.flutter.api.FlutterTestingProtocol;
import androidx.test.espresso.flutter.api.SyntheticAction;
import androidx.test.espresso.flutter.api.WidgetAction;
import androidx.test.espresso.flutter.api.WidgetMatcher;
import java.util.concurrent.Future;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
* A synthetic click on a Flutter widget.
*
* <p>Note, this is not a real click gesture event issued from Android system. Espresso delegates to
* Flutter engine to perform the {@link SyntheticClick} action.
*/
public final class SyntheticClickAction implements WidgetAction {

  /**
   * Performs the click by delegating to the Flutter testing protocol with a {@link
   * SyntheticClick} payload; no Android-level gesture is issued.
   */
  @ExperimentalTestApi
  @Override
  public Future<Void> perform(
      @Nullable WidgetMatcher targetWidget,
      @Nonnull View flutterView,
      @Nonnull FlutterTestingProtocol flutterTestingProtocol,
      @Nonnull UiController androidUiController) {
    return flutterTestingProtocol.perform(targetWidget, new SyntheticClick());
  }

  @Override
  public String toString() {
    return "click";
  }

  /** The synthetic action payload; "tap" is the action name sent to the testing protocol. */
  static class SyntheticClick extends SyntheticAction {

    public SyntheticClick() {
      super("tap");
    }
  }
}
| packages/packages/espresso/android/src/main/java/androidx/test/espresso/flutter/action/SyntheticClickAction.java/0 | {
"file_path": "packages/packages/espresso/android/src/main/java/androidx/test/espresso/flutter/action/SyntheticClickAction.java",
"repo_id": "packages",
"token_count": 461
} | 993 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package androidx.test.espresso.flutter.internal.protocol.impl;
/**
* Represents a condition that waits until no pending frame is scheduled in the Flutter framework.
*/
class NoPendingFrameCondition extends WaitCondition {

  public NoPendingFrameCondition() {
    // The identifier string handed to the base WaitCondition.
    super("NoPendingFrameCondition");
  }
}
| packages/packages/espresso/android/src/main/java/androidx/test/espresso/flutter/internal/protocol/impl/NoPendingFrameCondition.java/0 | {
"file_path": "packages/packages/espresso/android/src/main/java/androidx/test/espresso/flutter/internal/protocol/impl/NoPendingFrameCondition.java",
"repo_id": "packages",
"token_count": 125
} | 994 |
# extension_google_sign_in_as_googleapis_auth
A bridge package between Flutter's [`google_sign_in` plugin](https://pub.dev/packages/google_sign_in) and Dart's [`googleapis` package](https://pub.dev/packages/googleapis), that is able to create [`googleapis_auth`-like `AuthClient` instances](https://pub.dev/documentation/googleapis_auth/latest/googleapis_auth/AuthClient-class.html) directly from the `GoogleSignIn` plugin.
## Usage
This package is implemented as an [extension method](https://dart.dev/guides/language/extension-methods) on top of the `GoogleSignIn` plugin.
In order to use it, you need to add a `dependency` to your `pubspec.yaml`. Then, wherever you're importing `package:google_sign_in/google_sign_in.dart`, add the following:
<?code-excerpt "example/lib/main.dart (Import)"?>
```dart
import 'package:extension_google_sign_in_as_googleapis_auth/extension_google_sign_in_as_googleapis_auth.dart';
```
From that moment on, your `GoogleSignIn` instance will have an additional `Future<AuthClient?> authenticatedClient()` method that you can call once your sign in is successful to retrieve an `AuthClient`.
That object can then be used to create instances of `googleapis` API clients:
<?code-excerpt "example/lib/main.dart (CreateAPIClient)"?>
```dart
// Retrieve an [auth.AuthClient] from the current [GoogleSignIn] instance.
final auth.AuthClient? client = await _googleSignIn.authenticatedClient();
assert(client != null, 'Authenticated client missing!');
// Prepare a People Service authenticated client.
final PeopleServiceApi peopleApi = PeopleServiceApi(client!);
// Retrieve a list of the `names` of my `connections`
final ListConnectionsResponse response =
await peopleApi.people.connections.list(
'people/me',
personFields: 'names',
);
```
## Example
This package contains a modified version of Flutter's Google Sign In example app that uses `package:googleapis`' API clients, instead of raw http requests.
See it [here](https://github.com/flutter/packages/blob/main/packages/extension_google_sign_in_as_googleapis_auth/example/lib/main.dart).
The original code (and its license) can be seen [here](https://github.com/flutter/packages/tree/main/packages/google_sign_in/google_sign_in/example/lib/main.dart).
## Testing
Run tests with `flutter test`.
## Issues and feedback
Please file [issues](https://github.com/flutter/flutter/issues/new)
to send feedback or report a bug. Thank you!
| packages/packages/extension_google_sign_in_as_googleapis_auth/README.md/0 | {
"file_path": "packages/packages/extension_google_sign_in_as_googleapis_auth/README.md",
"repo_id": "packages",
"token_count": 747
} | 995 |
# file_selector
<?code-excerpt path-base="example/lib"?>
[](https://pub.dartlang.org/packages/file_selector)
A Flutter plugin that manages files and interactions with file dialogs.
| | Android | iOS | Linux | macOS | Web | Windows |
|-------------|---------|---------|-------|--------|-----|-------------|
| **Support** | SDK 19+ | iOS 12+ | Any | 10.14+ | Any | Windows 10+ |
## Usage
To use this plugin, add `file_selector` as a [dependency in your pubspec.yaml file](https://flutter.dev/platform-plugins/).
### macOS
You will need to [add an entitlement][entitlement] for either read-only access:
```xml
<key>com.apple.security.files.user-selected.read-only</key>
<true/>
```
or read/write access:
```xml
<key>com.apple.security.files.user-selected.read-write</key>
<true/>
```
depending on your use case.
### Examples
Here are small examples that show you how to use the API.
Please also take a look at our [example][example] app.
#### Open a single file
<?code-excerpt "open_image_page.dart (SingleOpen)"?>
```dart
const XTypeGroup typeGroup = XTypeGroup(
label: 'images',
extensions: <String>['jpg', 'png'],
);
final XFile? file =
await openFile(acceptedTypeGroups: <XTypeGroup>[typeGroup]);
```
#### Open multiple files at once
<?code-excerpt "open_multiple_images_page.dart (MultiOpen)"?>
```dart
const XTypeGroup jpgsTypeGroup = XTypeGroup(
label: 'JPEGs',
extensions: <String>['jpg', 'jpeg'],
);
const XTypeGroup pngTypeGroup = XTypeGroup(
label: 'PNGs',
extensions: <String>['png'],
);
final List<XFile> files = await openFiles(acceptedTypeGroups: <XTypeGroup>[
jpgsTypeGroup,
pngTypeGroup,
]);
```
#### Save a file
<?code-excerpt "readme_standalone_excerpts.dart (Save)"?>
```dart
const String fileName = 'suggested_name.txt';
final FileSaveLocation? result =
await getSaveLocation(suggestedName: fileName);
if (result == null) {
// Operation was canceled by the user.
return;
}
final Uint8List fileData = Uint8List.fromList('Hello World!'.codeUnits);
const String mimeType = 'text/plain';
final XFile textFile =
XFile.fromData(fileData, mimeType: mimeType, name: fileName);
await textFile.saveTo(result.path);
```
#### Get a directory path
<?code-excerpt "readme_standalone_excerpts.dart (GetDirectory)"?>
```dart
final String? directoryPath = await getDirectoryPath();
if (directoryPath == null) {
// Operation was canceled by the user.
return;
}
```
### Filtering by file types
Different platforms support different type group filter options. To avoid
`ArgumentError`s on some platforms, ensure that any `XTypeGroup`s you pass set
filters that cover all platforms you are targeting, or that you conditionally
pass different `XTypeGroup`s based on `Platform`.
| | Android | iOS | Linux | macOS | Web | Windows |
|--------------------------|--------|-----|-------|--------|-----|-------------|
| `extensions` | ✔️ | | ✔️ | ✔️ | ✔️ | ✔️ |
| `mimeTypes` | ✔️ | | ✔️ | ✔️† | ✔️ | |
| `uniformTypeIdentifiers` | | ✔️ | | ✔️ | | |
| `webWildCards` | | | | | ✔️ | |
† `mimeTypes` are not supported on version of macOS earlier than 11 (Big Sur).
### Features supported by platform
| Feature | Description | Android | iOS | Linux | macOS | Windows | Web |
| ---------------------- |----------------------------------- |---------|--------- | ---------- | -------- | ------------ | ----------- |
| Choose a single file | Pick a file/image | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ |
| Choose multiple files | Pick multiple files/images | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ | ✔️ |
| Choose a save location | Pick a directory to save a file in | ❌ | ❌ | ✔️ | ✔️ | ✔️ | ❌ |
| Choose a directory | Pick a directory and get its path | ✔️† | ❌ | ✔️ | ✔️ | ✔️ | ❌ |
† Choosing a directory is not supported on versions of Android before SDK 21 (Lollipop).
[example]:./example
[entitlement]: https://docs.flutter.dev/desktop#entitlements-and-the-app-sandbox
| packages/packages/file_selector/file_selector/README.md/0 | {
"file_path": "packages/packages/file_selector/file_selector/README.md",
"repo_id": "packages",
"token_count": 1794
} | 996 |
name: file_selector
description: Flutter plugin for opening and saving files, or selecting
directories, using native file selection UI.
repository: https://github.com/flutter/packages/tree/main/packages/file_selector/file_selector
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+file_selector%22
version: 1.0.3
environment:
sdk: ^3.1.0
flutter: ">=3.13.0"
flutter:
plugin:
platforms:
android:
default_package: file_selector_android
ios:
default_package: file_selector_ios
linux:
default_package: file_selector_linux
macos:
default_package: file_selector_macos
web:
default_package: file_selector_web
windows:
default_package: file_selector_windows
dependencies:
file_selector_android: ^0.5.0
file_selector_ios: ^0.5.0
file_selector_linux: ^0.9.2
file_selector_macos: ^0.9.3
file_selector_platform_interface: ^2.6.0
file_selector_web: ^0.9.1
file_selector_windows: ^0.9.3
flutter:
sdk: flutter
dev_dependencies:
flutter_test:
sdk: flutter
plugin_platform_interface: ^2.1.7
test: ^1.16.3
topics:
- files
- file-selection
- file-selector
| packages/packages/file_selector/file_selector/pubspec.yaml/0 | {
"file_path": "packages/packages/file_selector/file_selector/pubspec.yaml",
"repo_id": "packages",
"token_count": 520
} | 997 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Autogenerated from Pigeon (v9.2.5), do not edit directly.
// See also: https://pub.dev/packages/pigeon
// ignore_for_file: public_member_api_docs, non_constant_identifier_names, avoid_as, unused_import, unnecessary_parenthesis, prefer_null_aware_operators, omit_local_variable_types, unused_shown_name, unnecessary_import
import 'dart:async';
import 'dart:typed_data' show Float64List, Int32List, Int64List, Uint8List;
import 'package:flutter/foundation.dart' show ReadBuffer, WriteBuffer;
import 'package:flutter/services.dart';
/// The result of a native file selection, as exchanged over the Pigeon
/// message channel.
///
/// NOTE(review): this file is Pigeon-generated ("do not edit directly");
/// behavioral changes belong in the Pigeon input, not here.
class FileResponse {
  FileResponse({
    required this.path,
    this.mimeType,
    this.name,
    required this.size,
    required this.bytes,
  });

  /// The path of the selected file.
  String path;

  /// The MIME type of the file, if the platform reported one.
  String? mimeType;

  /// The display name of the file, if the platform reported one.
  String? name;

  /// The size of the file.
  int size;

  /// The byte contents of the file.
  Uint8List bytes;

  /// Serializes this object into the list form used by the message codec.
  Object encode() {
    return <Object?>[
      path,
      mimeType,
      name,
      size,
      bytes,
    ];
  }

  /// Deserializes an object produced by [encode].
  static FileResponse decode(Object result) {
    result as List<Object?>;
    return FileResponse(
      path: result[0]! as String,
      mimeType: result[1] as String?,
      name: result[2] as String?,
      size: result[3]! as int,
      bytes: result[4]! as Uint8List,
    );
  }
}
/// The accepted-type filter passed to the native file dialog, as exchanged
/// over the Pigeon message channel.
class FileTypes {
  FileTypes({
    required this.mimeTypes,
    required this.extensions,
  });

  /// Accepted MIME types.
  List<String?> mimeTypes;

  /// Accepted file extensions.
  List<String?> extensions;

  /// Serializes this object into the list form used by the message codec.
  Object encode() {
    return <Object?>[
      mimeTypes,
      extensions,
    ];
  }

  /// Deserializes an object produced by [encode].
  static FileTypes decode(Object result) {
    result as List<Object?>;
    return FileTypes(
      mimeTypes: (result[0] as List<Object?>?)!.cast<String?>(),
      extensions: (result[1] as List<Object?>?)!.cast<String?>(),
    );
  }
}
/// Message codec extending [StandardMessageCodec] with support for the
/// custom [FileResponse] and [FileTypes] types.
class _FileSelectorApiCodec extends StandardMessageCodec {
  const _FileSelectorApiCodec();

  @override
  void writeValue(WriteBuffer buffer, Object? value) {
    if (value is FileResponse) {
      // Type tag 128 identifies a FileResponse in the byte stream.
      buffer.putUint8(128);
      writeValue(buffer, value.encode());
    } else if (value is FileTypes) {
      // Type tag 129 identifies a FileTypes in the byte stream.
      buffer.putUint8(129);
      writeValue(buffer, value.encode());
    } else {
      super.writeValue(buffer, value);
    }
  }

  @override
  Object? readValueOfType(int type, ReadBuffer buffer) {
    switch (type) {
      case 128:
        return FileResponse.decode(readValue(buffer)!);
      case 129:
        return FileTypes.decode(readValue(buffer)!);
      default:
        return super.readValueOfType(type, buffer);
    }
  }
}
/// An API to call to native code to select files or directories.
/// An API to call to native code to select files or directories.
class FileSelectorApi {
  /// Constructor for [FileSelectorApi]. The [binaryMessenger] named argument is
  /// available for dependency injection. If it is left null, the default
  /// BinaryMessenger will be used which routes to the host platform.
  FileSelectorApi({BinaryMessenger? binaryMessenger})
      : _binaryMessenger = binaryMessenger;

  // The messenger used to talk to the host platform; null selects the default.
  final BinaryMessenger? _binaryMessenger;

  /// The codec used for all messages, including the custom [FileResponse] and
  /// [FileTypes] types.
  static const MessageCodec<Object?> codec = _FileSelectorApiCodec();

  /// Opens a file dialog for loading files and returns a file path.
  ///
  /// Returns `null` if user cancels the operation.
  Future<FileResponse?> openFile(
      String? arg_initialDirectory, FileTypes arg_allowedTypes) async {
    final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
        'dev.flutter.pigeon.FileSelectorApi.openFile', codec,
        binaryMessenger: _binaryMessenger);
    final List<Object?>? replyList =
        await channel.send(<Object?>[arg_initialDirectory, arg_allowedTypes])
            as List<Object?>?;
    if (replyList == null) {
      // A null reply indicates a channel-level failure.
      throw PlatformException(
        code: 'channel-error',
        message: 'Unable to establish connection on channel.',
      );
    } else if (replyList.length > 1) {
      // A reply with more than one element encodes an error from the host.
      throw PlatformException(
        code: replyList[0]! as String,
        message: replyList[1] as String?,
        details: replyList[2],
      );
    } else {
      return (replyList[0] as FileResponse?);
    }
  }

  /// Opens a file dialog for loading files and returns a list of file responses
  /// chosen by the user.
  Future<List<FileResponse?>> openFiles(
      String? arg_initialDirectory, FileTypes arg_allowedTypes) async {
    final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
        'dev.flutter.pigeon.FileSelectorApi.openFiles', codec,
        binaryMessenger: _binaryMessenger);
    final List<Object?>? replyList =
        await channel.send(<Object?>[arg_initialDirectory, arg_allowedTypes])
            as List<Object?>?;
    if (replyList == null) {
      // A null reply indicates a channel-level failure.
      throw PlatformException(
        code: 'channel-error',
        message: 'Unable to establish connection on channel.',
      );
    } else if (replyList.length > 1) {
      // A reply with more than one element encodes an error from the host.
      throw PlatformException(
        code: replyList[0]! as String,
        message: replyList[1] as String?,
        details: replyList[2],
      );
    } else if (replyList[0] == null) {
      // This method's return type is non-nullable, so a null payload is an
      // error.
      throw PlatformException(
        code: 'null-error',
        message: 'Host platform returned null value for non-null return value.',
      );
    } else {
      return (replyList[0] as List<Object?>?)!.cast<FileResponse?>();
    }
  }

  /// Opens a file dialog for loading directories and returns a directory path.
  ///
  /// Returns `null` if user cancels the operation.
  Future<String?> getDirectoryPath(String? arg_initialDirectory) async {
    final BasicMessageChannel<Object?> channel = BasicMessageChannel<Object?>(
        'dev.flutter.pigeon.FileSelectorApi.getDirectoryPath', codec,
        binaryMessenger: _binaryMessenger);
    final List<Object?>? replyList =
        await channel.send(<Object?>[arg_initialDirectory]) as List<Object?>?;
    if (replyList == null) {
      // A null reply indicates a channel-level failure.
      throw PlatformException(
        code: 'channel-error',
        message: 'Unable to establish connection on channel.',
      );
    } else if (replyList.length > 1) {
      // A reply with more than one element encodes an error from the host.
      throw PlatformException(
        code: replyList[0]! as String,
        message: replyList[1] as String?,
        details: replyList[2],
      );
    } else {
      return (replyList[0] as String?);
    }
  }
}
| packages/packages/file_selector/file_selector_android/lib/src/file_selector_api.g.dart/0 | {
"file_path": "packages/packages/file_selector/file_selector_android/lib/src/file_selector_api.g.dart",
"repo_id": "packages",
"token_count": 2314
} | 998 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "include/file_selector_linux/file_selector_plugin.h"
#include <flutter_linux/flutter_linux.h>
#include <gtest/gtest.h>
#include <gtk/gtk.h>
#include "file_selector_plugin_private.h"
// TODO(stuartmorgan): Restructure the helper to take a callback for showing
// the dialog, so that the tests can mock out that callback with something
// that changes the selection so that the return value path can be tested
// as well.
// TODO(stuartmorgan): Add an injectable wrapper around
// gtk_file_chooser_native_new to allow for testing values that are given as
// construction parameters and can't be queried later.
// Verifies that "openFile" with no arguments creates a single-selection
// open dialog.
TEST(FileSelectorPlugin, TestOpenSimple) {
  g_autoptr(FlValue) args = fl_value_new_map();

  g_autoptr(GtkFileChooserNative) dialog =
      create_dialog_for_method(nullptr, "openFile", args);

  ASSERT_NE(dialog, nullptr);
  EXPECT_EQ(gtk_file_chooser_get_action(GTK_FILE_CHOOSER(dialog)),
            GTK_FILE_CHOOSER_ACTION_OPEN);
  EXPECT_EQ(gtk_file_chooser_get_select_multiple(GTK_FILE_CHOOSER(dialog)),
            false);
}
// Verifies that "openFile" with "multiple": true enables multi-selection on
// the open dialog.
TEST(FileSelectorPlugin, TestOpenMultiple) {
  g_autoptr(FlValue) args = fl_value_new_map();
  fl_value_set_string_take(args, "multiple", fl_value_new_bool(true));

  g_autoptr(GtkFileChooserNative) dialog =
      create_dialog_for_method(nullptr, "openFile", args);

  ASSERT_NE(dialog, nullptr);
  EXPECT_EQ(gtk_file_chooser_get_action(GTK_FILE_CHOOSER(dialog)),
            GTK_FILE_CHOOSER_ACTION_OPEN);
  EXPECT_EQ(gtk_file_chooser_get_select_multiple(GTK_FILE_CHOOSER(dialog)),
            true);
}
// Verifies that "acceptedTypeGroups" arguments are converted into GTK file
// filters with the expected matching behavior.
TEST(FileSelectorPlugin, TestOpenWithFilter) {
  // Build three type groups: by MIME type, by extension, and a wildcard.
  g_autoptr(FlValue) type_groups = fl_value_new_list();
  {
    g_autoptr(FlValue) text_group_mime_types = fl_value_new_list();
    fl_value_append_take(text_group_mime_types,
                         fl_value_new_string("text/plain"));
    g_autoptr(FlValue) text_group = fl_value_new_map();
    fl_value_set_string_take(text_group, "label", fl_value_new_string("Text"));
    fl_value_set_string(text_group, "mimeTypes", text_group_mime_types);
    fl_value_append(type_groups, text_group);
  }
  {
    g_autoptr(FlValue) image_group_extensions = fl_value_new_list();
    fl_value_append_take(image_group_extensions, fl_value_new_string("*.png"));
    fl_value_append_take(image_group_extensions, fl_value_new_string("*.gif"));
    // Fixed misspelled extension pattern ("*.jgpeg" -> "*.jpeg").
    fl_value_append_take(image_group_extensions,
                         fl_value_new_string("*.jpeg"));
    g_autoptr(FlValue) image_group = fl_value_new_map();
    fl_value_set_string_take(image_group, "label",
                             fl_value_new_string("Images"));
    fl_value_set_string(image_group, "extensions", image_group_extensions);
    fl_value_append(type_groups, image_group);
  }
  {
    g_autoptr(FlValue) any_group_extensions = fl_value_new_list();
    fl_value_append_take(any_group_extensions, fl_value_new_string("*"));
    g_autoptr(FlValue) any_group = fl_value_new_map();
    fl_value_set_string_take(any_group, "label", fl_value_new_string("Any"));
    fl_value_set_string(any_group, "extensions", any_group_extensions);
    fl_value_append(type_groups, any_group);
  }
  g_autoptr(FlValue) args = fl_value_new_map();
  fl_value_set_string(args, "acceptedTypeGroups", type_groups);

  g_autoptr(GtkFileChooserNative) dialog =
      create_dialog_for_method(nullptr, "openFile", args);

  ASSERT_NE(dialog, nullptr);
  EXPECT_EQ(gtk_file_chooser_get_action(GTK_FILE_CHOOSER(dialog)),
            GTK_FILE_CHOOSER_ACTION_OPEN);
  EXPECT_EQ(gtk_file_chooser_get_select_multiple(GTK_FILE_CHOOSER(dialog)),
            false);

  // Validate filters.
  g_autoptr(GSList) type_group_list =
      gtk_file_chooser_list_filters(GTK_FILE_CHOOSER(dialog));
  EXPECT_EQ(g_slist_length(type_group_list), 3);
  GtkFileFilter* text_filter =
      GTK_FILE_FILTER(g_slist_nth_data(type_group_list, 0));
  GtkFileFilter* image_filter =
      GTK_FILE_FILTER(g_slist_nth_data(type_group_list, 1));
  GtkFileFilter* any_filter =
      GTK_FILE_FILTER(g_slist_nth_data(type_group_list, 2));
  // Filters can't be inspected, so query them to see that they match expected
  // filter behavior.
  GtkFileFilterInfo text_file_info = {};
  text_file_info.contains = static_cast<GtkFileFilterFlags>(
      GTK_FILE_FILTER_DISPLAY_NAME | GTK_FILE_FILTER_MIME_TYPE);
  text_file_info.display_name = "foo.txt";
  text_file_info.mime_type = "text/plain";
  GtkFileFilterInfo image_file_info = {};
  image_file_info.contains = static_cast<GtkFileFilterFlags>(
      GTK_FILE_FILTER_DISPLAY_NAME | GTK_FILE_FILTER_MIME_TYPE);
  image_file_info.display_name = "foo.png";
  image_file_info.mime_type = "image/png";
  // Each filter should accept only its own group's files; the wildcard
  // accepts both.
  EXPECT_TRUE(gtk_file_filter_filter(text_filter, &text_file_info));
  EXPECT_FALSE(gtk_file_filter_filter(text_filter, &image_file_info));
  EXPECT_FALSE(gtk_file_filter_filter(image_filter, &text_file_info));
  EXPECT_TRUE(gtk_file_filter_filter(image_filter, &image_file_info));
  EXPECT_TRUE(gtk_file_filter_filter(any_filter, &image_file_info));
  EXPECT_TRUE(gtk_file_filter_filter(any_filter, &text_file_info));
}
// Verifies that "getSavePath" with no arguments creates a single-selection
// save dialog.
TEST(FileSelectorPlugin, TestSaveSimple) {
  g_autoptr(FlValue) args = fl_value_new_map();

  g_autoptr(GtkFileChooserNative) dialog =
      create_dialog_for_method(nullptr, "getSavePath", args);

  ASSERT_NE(dialog, nullptr);
  EXPECT_EQ(gtk_file_chooser_get_action(GTK_FILE_CHOOSER(dialog)),
            GTK_FILE_CHOOSER_ACTION_SAVE);
  EXPECT_EQ(gtk_file_chooser_get_select_multiple(GTK_FILE_CHOOSER(dialog)),
            false);
}
// Verifies that "getSavePath" honors the "initialDirectory" and
// "suggestedName" arguments (only the suggested name is queryable here).
TEST(FileSelectorPlugin, TestSaveWithArguments) {
  g_autoptr(FlValue) args = fl_value_new_map();
  fl_value_set_string_take(args, "initialDirectory",
                           fl_value_new_string("/tmp"));
  fl_value_set_string_take(args, "suggestedName",
                           fl_value_new_string("foo.txt"));

  g_autoptr(GtkFileChooserNative) dialog =
      create_dialog_for_method(nullptr, "getSavePath", args);

  ASSERT_NE(dialog, nullptr);
  EXPECT_EQ(gtk_file_chooser_get_action(GTK_FILE_CHOOSER(dialog)),
            GTK_FILE_CHOOSER_ACTION_SAVE);
  EXPECT_EQ(gtk_file_chooser_get_select_multiple(GTK_FILE_CHOOSER(dialog)),
            false);
  g_autofree gchar* current_name =
      gtk_file_chooser_get_current_name(GTK_FILE_CHOOSER(dialog));
  EXPECT_STREQ(current_name, "foo.txt");
  // TODO(stuartmorgan): gtk_file_chooser_get_current_folder doesn't seem to
  // return a value set by gtk_file_chooser_set_current_folder, or at least
  // doesn't in a test context, so that's not currently validated.
}
// Verifies that "getDirectoryPath" creates a single-selection
// folder-selection dialog.
TEST(FileSelectorPlugin, TestGetDirectory) {
  g_autoptr(FlValue) args = fl_value_new_map();

  g_autoptr(GtkFileChooserNative) dialog =
      create_dialog_for_method(nullptr, "getDirectoryPath", args);

  ASSERT_NE(dialog, nullptr);
  EXPECT_EQ(gtk_file_chooser_get_action(GTK_FILE_CHOOSER(dialog)),
            GTK_FILE_CHOOSER_ACTION_SELECT_FOLDER);
  EXPECT_EQ(gtk_file_chooser_get_select_multiple(GTK_FILE_CHOOSER(dialog)),
            false);
}
// Verifies that "getDirectoryPath" with "multiple": true enables
// multi-selection on the folder-selection dialog.
TEST(FileSelectorPlugin, TestGetMultipleDirectories) {
  g_autoptr(FlValue) args = fl_value_new_map();
  fl_value_set_string_take(args, "multiple", fl_value_new_bool(true));

  g_autoptr(GtkFileChooserNative) dialog =
      create_dialog_for_method(nullptr, "getDirectoryPath", args);

  ASSERT_NE(dialog, nullptr);
  EXPECT_EQ(gtk_file_chooser_get_action(GTK_FILE_CHOOSER(dialog)),
            GTK_FILE_CHOOSER_ACTION_SELECT_FOLDER);
  EXPECT_EQ(gtk_file_chooser_get_select_multiple(GTK_FILE_CHOOSER(dialog)),
            true);
}
| packages/packages/file_selector/file_selector_linux/linux/test/file_selector_plugin_test.cc/0 | {
"file_path": "packages/packages/file_selector/file_selector_linux/linux/test/file_selector_plugin_test.cc",
"repo_id": "packages",
"token_count": 3237
} | 999 |
name: example
description: Example for file_selector_windows implementation.
publish_to: 'none'
version: 1.0.0
environment:
sdk: ^3.1.0
flutter: ">=3.13.0"
dependencies:
file_selector_platform_interface: ^2.6.0
file_selector_windows:
# When depending on this package from a real application you should use:
# file_selector_windows: ^x.y.z
# See https://dart.dev/tools/pub/dependencies#version-constraints
# The example app is bundled with the plugin so we use a path dependency on
# the parent directory to use the current plugin's version.
path: ..
flutter:
sdk: flutter
dev_dependencies:
flutter_test:
sdk: flutter
flutter:
uses-material-design: true
| packages/packages/file_selector/file_selector_windows/example/pubspec.yaml/0 | {
"file_path": "packages/packages/file_selector/file_selector_windows/example/pubspec.yaml",
"repo_id": "packages",
"token_count": 251
} | 1,000 |
<?code-excerpt path-base="example/lib"?>
# Adaptive Scaffold
`AdaptiveScaffold` reacts to input from users, devices and screen elements and
renders your Flutter application according to the
[Material 3](https://m3.material.io/foundations/adaptive-design/overview)
guidelines.
To see examples of using these widgets to make a simple but common adaptive
layout:
```bash
cd example/
flutter run --release
```
## AdaptiveScaffold
AdaptiveScaffold implements the basic visual layout structure for Material
Design 3 that adapts to a variety of screens. It provides preset layouts,
including positions and animations, by handling macro changes in navigational
elements and bodies based on the current features of the screen, namely screen
width and platform. For example, the navigational elements would be a
BottomNavigationBar on a small mobile device and a NavigationRail on larger
devices. The body is the primary screen that takes up the space left by the
navigational elements. The secondaryBody acts as an option to split the space
between two panes for purposes such as having a detail view. There is some
automatic functionality with foldables to handle the split between panels
properly. AdaptiveScaffold is much simpler to use but is not the best if you
would like high customizability. Apps that would like more refined layout and/or
animation should use AdaptiveLayout.
### Example Usage
<?code-excerpt "adaptive_scaffold_demo.dart (Example)"?>
```dart
@override
Widget build(BuildContext context) {
// Define the children to display within the body at different breakpoints.
final List<Widget> children = <Widget>[
for (int i = 0; i < 10; i++)
Padding(
padding: const EdgeInsets.all(8.0),
child: Container(
color: const Color.fromARGB(255, 255, 201, 197),
height: 400,
),
)
];
return AdaptiveScaffold(
// An option to override the default transition duration.
transitionDuration: Duration(milliseconds: _transitionDuration),
// An option to override the default breakpoints used for small, medium,
// and large.
smallBreakpoint: const WidthPlatformBreakpoint(end: 700),
mediumBreakpoint: const WidthPlatformBreakpoint(begin: 700, end: 1000),
largeBreakpoint: const WidthPlatformBreakpoint(begin: 1000),
useDrawer: false,
selectedIndex: _selectedTab,
onSelectedIndexChange: (int index) {
setState(() {
_selectedTab = index;
});
},
destinations: const <NavigationDestination>[
NavigationDestination(
icon: Icon(Icons.inbox_outlined),
selectedIcon: Icon(Icons.inbox),
label: 'Inbox',
),
NavigationDestination(
icon: Icon(Icons.article_outlined),
selectedIcon: Icon(Icons.article),
label: 'Articles',
),
NavigationDestination(
icon: Icon(Icons.chat_outlined),
selectedIcon: Icon(Icons.chat),
label: 'Chat',
),
NavigationDestination(
icon: Icon(Icons.video_call_outlined),
selectedIcon: Icon(Icons.video_call),
label: 'Video',
),
NavigationDestination(
icon: Icon(Icons.home_outlined),
selectedIcon: Icon(Icons.home),
label: 'Inbox',
),
],
body: (_) => GridView.count(crossAxisCount: 2, children: children),
smallBody: (_) => ListView.builder(
itemCount: children.length,
itemBuilder: (_, int idx) => children[idx],
),
// Define a default secondaryBody.
secondaryBody: (_) => Container(
color: const Color.fromARGB(255, 234, 158, 192),
),
// Override the default secondaryBody during the smallBreakpoint to be
// empty. Must use AdaptiveScaffold.emptyBuilder to ensure it is properly
// overridden.
smallSecondaryBody: AdaptiveScaffold.emptyBuilder,
);
}
```
## The Background Widget Suite
These are the set of widgets that are used on a lower level and offer more
customizability at a cost of more lines of code.
### AdaptiveLayout

AdaptiveLayout is the top-level widget class that arranges the layout of the
slots and their animation, similar to Scaffold. It takes in several LayoutSlots
and returns an appropriate layout based on the diagram above. AdaptiveScaffold
is built upon AdaptiveLayout internally but abstracts some of the complexity
with presets based on the Material 3 Design specification.
### SlotLayout
SlotLayout handles the adaptivity or the changes between widgets at certain
Breakpoints. It also holds the logic for animating between breakpoints. It takes
SlotLayoutConfigs mapped to Breakpoints in a config and displays a widget based
on that information.
### SlotLayout.from
SlotLayout.from creates a SlotLayoutConfig that holds the actual widget to be
displayed as well as its entrance and exit animations.
### Example Usage
<?code-excerpt "adaptive_layout_demo.dart (Example)"?>
```dart
// AdaptiveLayout has a number of slots that take SlotLayouts and these
// SlotLayouts' configs take maps of Breakpoints to SlotLayoutConfigs.
return AdaptiveLayout(
// An option to override the default transition duration.
transitionDuration: Duration(milliseconds: _transitionDuration),
// Primary navigation config has nothing from 0 to 600 dp screen width,
// then an unextended NavigationRail with no labels and just icons then an
// extended NavigationRail with both icons and labels.
primaryNavigation: SlotLayout(
config: <Breakpoint, SlotLayoutConfig>{
Breakpoints.medium: SlotLayout.from(
inAnimation: AdaptiveScaffold.leftOutIn,
key: const Key('Primary Navigation Medium'),
builder: (_) => AdaptiveScaffold.standardNavigationRail(
selectedIndex: selectedNavigation,
onDestinationSelected: (int newIndex) {
setState(() {
selectedNavigation = newIndex;
});
},
leading: const Icon(Icons.menu),
destinations: destinations
.map((NavigationDestination destination) =>
AdaptiveScaffold.toRailDestination(destination))
.toList(),
backgroundColor: navRailTheme.backgroundColor,
selectedIconTheme: navRailTheme.selectedIconTheme,
unselectedIconTheme: navRailTheme.unselectedIconTheme,
selectedLabelTextStyle: navRailTheme.selectedLabelTextStyle,
unSelectedLabelTextStyle: navRailTheme.unselectedLabelTextStyle,
),
),
Breakpoints.large: SlotLayout.from(
key: const Key('Primary Navigation Large'),
inAnimation: AdaptiveScaffold.leftOutIn,
builder: (_) => AdaptiveScaffold.standardNavigationRail(
selectedIndex: selectedNavigation,
onDestinationSelected: (int newIndex) {
setState(() {
selectedNavigation = newIndex;
});
},
extended: true,
leading: const Row(
mainAxisAlignment: MainAxisAlignment.spaceAround,
children: <Widget>[
Text(
'REPLY',
style: TextStyle(color: Color.fromARGB(255, 255, 201, 197)),
),
Icon(Icons.menu_open)
],
),
destinations: destinations
.map((NavigationDestination destination) =>
AdaptiveScaffold.toRailDestination(destination))
.toList(),
trailing: trailingNavRail,
backgroundColor: navRailTheme.backgroundColor,
selectedIconTheme: navRailTheme.selectedIconTheme,
unselectedIconTheme: navRailTheme.unselectedIconTheme,
selectedLabelTextStyle: navRailTheme.selectedLabelTextStyle,
unSelectedLabelTextStyle: navRailTheme.unselectedLabelTextStyle,
),
),
},
),
// Body switches between a ListView and a GridView from small to medium
// breakpoints and onwards.
body: SlotLayout(
config: <Breakpoint, SlotLayoutConfig>{
Breakpoints.small: SlotLayout.from(
key: const Key('Body Small'),
builder: (_) => ListView.builder(
itemCount: children.length,
itemBuilder: (BuildContext context, int index) => children[index],
),
),
Breakpoints.mediumAndUp: SlotLayout.from(
key: const Key('Body Medium'),
builder: (_) =>
GridView.count(crossAxisCount: 2, children: children),
)
},
),
// BottomNavigation is only active in small views defined as under 600 dp
// width.
bottomNavigation: SlotLayout(
config: <Breakpoint, SlotLayoutConfig>{
Breakpoints.small: SlotLayout.from(
key: const Key('Bottom Navigation Small'),
inAnimation: AdaptiveScaffold.bottomToTop,
outAnimation: AdaptiveScaffold.topToBottom,
builder: (_) => AdaptiveScaffold.standardBottomNavigationBar(
destinations: destinations,
currentIndex: selectedNavigation,
onDestinationSelected: (int newIndex) {
setState(() {
selectedNavigation = newIndex;
});
},
),
)
},
),
);
```
Both of the examples shown here produce the same output:

## Additional information
You can find more information on this package and its usage in the public
[design doc](https://docs.google.com/document/d/1qhrpTWYs5f67X8v32NCCNTRMIjSrVHuaMEFAul-Q_Ms/edit?usp=sharing).
| packages/packages/flutter_adaptive_scaffold/README.md/0 | {
"file_path": "packages/packages/flutter_adaptive_scaffold/README.md",
"repo_id": "packages",
"token_count": 3435
} | 1,001 |
#include "../../Flutter/Flutter-Debug.xcconfig"
#include "Warnings.xcconfig"
| packages/packages/flutter_image/example/macos/Runner/Configs/Debug.xcconfig/0 | {
"file_path": "packages/packages/flutter_image/example/macos/Runner/Configs/Debug.xcconfig",
"repo_id": "packages",
"token_count": 32
} | 1,002 |
name: flutter_lints
description: Recommended lints for Flutter apps, packages, and plugins to encourage good coding practices.
repository: https://github.com/flutter/packages/tree/main/packages/flutter_lints
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+flutter_lints%22
version: 3.0.1
environment:
sdk: ^3.1.0
dependencies:
lints: ^3.0.0
# Code is not allowed in this package. Do not add any dependencies or dev_dependencies.
topics:
- lints
| packages/packages/flutter_lints/pubspec.yaml/0 | {
"file_path": "packages/packages/flutter_lints/pubspec.yaml",
"repo_id": "packages",
"token_count": 179
} | 1,003 |
# Markdown Example
Markdown allows you to easily include formatted text, images, and even formatted Dart code in your app.
## Titles
Setext-style
```
This is an H1
=============
This is an H2
-------------
```
Atx-style
```
# This is an H1
## This is an H2
###### This is an H6
```
Select the valid headers:
- [x] `# hello`
- [ ] `#hello`
## Links
[Google's Homepage][Google]
```
[inline-style](https://www.google.com)
[reference-style][Google]
```
## Images

## Tables
|Syntax |Result |
|---------------------------------------|-------------------------------------|
|`*italic 1*` |*italic 1* |
|`_italic 2_` | _italic 2_ |
|`**bold 1**` |**bold 1** |
|`__bold 2__` |__bold 2__ |
|`This is a ~~strikethrough~~` |This is a ~~strikethrough~~ |
|`***italic bold 1***` |***italic bold 1*** |
|`___italic bold 2___` |___italic bold 2___ |
|`***~~italic bold strikethrough 1~~***`|***~~italic bold strikethrough 1~~***|
|`~~***italic bold strikethrough 2***~~`|~~***italic bold strikethrough 2***~~|
## Styling
Style text as _italic_, __bold__, ~~strikethrough~~, or `inline code`.
- Use bulleted lists
- To better clarify
- Your points
## Code blocks
Formatted Dart code looks really pretty too:
```
void main() {
runApp(MaterialApp(
home: Scaffold(
body: Markdown(data: markdownData),
),
));
}
```
## Center Title
###### ※ ※ ※
_* To see how to implement this, see main.dart#L129 in the example._
## Custom Syntax
NaOH + Al_2O_3 = NaAlO_2 + H_2O
C_4H_10 = C_2H_6 + C_2H_4
## Markdown widget
This is an example of how to create your own Markdown widget:
Markdown(data: 'Hello _world_!');
Enjoy!
[Google]: https://www.google.com/
## Line Breaks
This is an example of how to create line breaks (tab or two whitespaces):
line 1
line 2
line 3
| packages/packages/flutter_markdown/example/assets/original_markdown_example_data.md/0 | {
"file_path": "packages/packages/flutter_markdown/example/assets/original_markdown_example_data.md",
"repo_id": "packages",
"token_count": 1074
} | 1,004 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:flutter/cupertino.dart';
import 'package:flutter_markdown/flutter_markdown.dart';
// #docregion CreateMarkdownWithEmojiExtension
import 'package:markdown/markdown.dart' as md;
// #enddocregion CreateMarkdownWithEmojiExtension
/// Creates a simple `Markdown` widget.
///
/// The lines between the `#docregion` markers are extracted verbatim into the
/// package README, so the body must stay a minimal, self-contained example.
void createMarkdown() {
  const String markdownSource = '';
  // #docregion CreateMarkdown
  const Markdown(data: markdownSource);
  // #enddocregion CreateMarkdown
}
/// Creates a simple `MarkdownBody` widget.
///
/// The lines between the `#docregion` markers are extracted verbatim into the
/// package README, so the body must stay a minimal, self-contained example.
void createMarkdownBody() {
  const String markdownSource = '';
  // #docregion CreateMarkdownBody
  const MarkdownBody(data: markdownSource);
  // #enddocregion CreateMarkdownBody
}
/// Creates a selectable `Markdown` widget whose source contains a literal
/// emoji character.
///
/// The lines between the `#docregion` markers are extracted verbatim into the
/// package README, so the body must stay a minimal, self-contained example.
void createMarkdownWithEmoji() {
  final ScrollController controller = ScrollController();
  // #docregion CreateMarkdownWithEmoji
  Markdown(
    controller: controller,
    selectable: true,
    data: 'Insert emoji here😀 ',
  );
  // #enddocregion CreateMarkdownWithEmoji
}
/// Creates a selectable `Markdown` widget that renders `:smiley:`-style
/// shortcodes by combining `md.EmojiSyntax` with the GitHub-flavored
/// inline syntaxes.
///
/// The lines between the `#docregion` markers are extracted verbatim into the
/// package README, so the body must stay a minimal, self-contained example.
void createMarkdownWithEmojiExtension() {
  final ScrollController controller = ScrollController();
  // #docregion CreateMarkdownWithEmojiExtension
  Markdown(
    controller: controller,
    selectable: true,
    data: 'Insert emoji :smiley: here',
    extensionSet: md.ExtensionSet(
      md.ExtensionSet.gitHubFlavored.blockSyntaxes,
      <md.InlineSyntax>[
        md.EmojiSyntax(),
        ...md.ExtensionSet.gitHubFlavored.inlineSyntaxes
      ],
    ),
  );
  // #enddocregion CreateMarkdownWithEmojiExtension
}
| packages/packages/flutter_markdown/example/lib/readme_excerpts.dart/0 | {
"file_path": "packages/packages/flutter_markdown/example/lib/readme_excerpts.dart",
"repo_id": "packages",
"token_count": 571
} | 1,005 |
#include "ephemeral/Flutter-Generated.xcconfig"
| packages/packages/flutter_markdown/example/macos/Flutter/Flutter-Release.xcconfig/0 | {
"file_path": "packages/packages/flutter_markdown/example/macos/Flutter/Flutter-Release.xcconfig",
"repo_id": "packages",
"token_count": 19
} | 1,006 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:flutter/widgets.dart';
import 'package:flutter_markdown/flutter_markdown.dart';
import 'package:flutter_test/flutter_test.dart';
import 'utils.dart';
void main() => defineTests();
/// Registers the widget tests covering horizontal rule rendering.
void defineTests() {
  group('Horizontal Rule', () {
    // The widget tree produced when a document consists solely of a
    // horizontal rule.
    const List<Type> horizontalRuleTree = <Type>[
      MarkdownBody,
      Container,
      DecoratedBox,
      Padding,
      LimitedBox,
      ConstrainedBox,
    ];

    // Pumps [data] through a MarkdownBody and asserts that the rendered
    // widget tree is exactly [horizontalRuleTree].
    Future<void> pumpAndExpectHorizontalRule(
        WidgetTester tester, String data) async {
      await tester.pumpWidget(boilerplate(MarkdownBody(data: data)));

      final Iterable<Widget> widgets = selfAndDescendantWidgetsOf(
        find.byType(MarkdownBody),
        tester,
      );
      expectWidgetTypes(widgets, horizontalRuleTree);
    }

    testWidgets(
      '3 consecutive hyphens',
      (WidgetTester tester) => pumpAndExpectHorizontalRule(tester, '---'),
    );

    testWidgets(
      '5 consecutive hyphens',
      (WidgetTester tester) => pumpAndExpectHorizontalRule(tester, '-----'),
    );

    testWidgets(
      '3 asterisks separated with spaces',
      (WidgetTester tester) => pumpAndExpectHorizontalRule(tester, '* * *'),
    );

    testWidgets(
      '3 asterisks separated with spaces alongside text Markdown',
      (WidgetTester tester) async {
        // Headers before the rule add heading subtrees ahead of the rule's
        // Container subtree.
        const String data = '# h1\n ## h2\n* * *';
        await tester.pumpWidget(boilerplate(const MarkdownBody(data: data)));

        final Iterable<Widget> widgets = selfAndDescendantWidgetsOf(
          find.byType(MarkdownBody),
          tester,
        );
        expectWidgetTypes(widgets, <Type>[
          MarkdownBody,
          Column,
          Column,
          Wrap,
          Text,
          RichText,
          SizedBox,
          Column,
          Wrap,
          Text,
          RichText,
          SizedBox,
          Container,
          DecoratedBox,
          Padding,
          LimitedBox,
          ConstrainedBox,
        ]);
      },
    );
  });
}
| packages/packages/flutter_markdown/test/horizontal_rule_test.dart/0 | {
"file_path": "packages/packages/flutter_markdown/test/horizontal_rule_test.dart",
"repo_id": "packages",
"token_count": 1367
} | 1,007 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart';
import 'package:flutter_markdown/flutter_markdown.dart';
import 'package:flutter_test/flutter_test.dart';
import 'utils.dart';
void main() => defineTests();
/// Registers the widget tests covering plain-text rendering, leading spaces,
/// line breaks, text selection, and strikethrough.
void defineTests() {
  group('Data', () {
    testWidgets(
      'simple data',
      (WidgetTester tester) async {
        // extract to variable; if run with --track-widget-creation using const
        // widgets aren't necessarily identical if created on different lines.
        const Markdown markdown = Markdown(data: 'Data1');

        await tester.pumpWidget(boilerplate(markdown));
        expectTextStrings(tester.allWidgets, <String>['Data1']);
        final String stateBefore = dumpRenderView();

        // Re-pumping the identical widget must not change the render tree.
        await tester.pumpWidget(boilerplate(markdown));
        final String stateAfter = dumpRenderView();
        expect(stateBefore, equals(stateAfter));

        await tester.pumpWidget(boilerplate(const Markdown(data: 'Data2')));
        expectTextStrings(tester.allWidgets, <String>['Data2']);
      },
    );
  });

  group('Text', () {
    testWidgets(
      'Empty string',
      (WidgetTester tester) async {
        await tester.pumpWidget(
          boilerplate(
            const MarkdownBody(data: ''),
          ),
        );

        final Iterable<Widget> widgets = selfAndDescendantWidgetsOf(
          find.byType(MarkdownBody),
          tester,
        );
        expectWidgetTypes(widgets, <Type>[
          MarkdownBody,
          Column,
        ]);
      },
    );

    testWidgets(
      'Simple string',
      (WidgetTester tester) async {
        await tester.pumpWidget(
          boilerplate(
            const MarkdownBody(data: 'Hello'),
          ),
        );

        final Iterable<Widget> widgets = selfAndDescendantWidgetsOf(
          find.byType(MarkdownBody),
          tester,
        );
        expectWidgetTypes(widgets, <Type>[
          MarkdownBody,
          Column,
          Wrap,
          Text,
          RichText,
        ]);
        expectTextStrings(widgets, <String>['Hello']);
      },
    );
  });

  group('Leading spaces', () {
    testWidgets(
        // Example 192 from the GitHub Flavored Markdown specification.
        'leading space are ignored', (WidgetTester tester) async {
      const String data = ' aaa\n bbb';
      await tester.pumpWidget(
        boilerplate(
          const MarkdownBody(data: data),
        ),
      );

      final Iterable<Widget> widgets = selfAndDescendantWidgetsOf(
        find.byType(MarkdownBody),
        tester,
      );
      expectWidgetTypes(widgets, <Type>[
        MarkdownBody,
        Column,
        Wrap,
        Text,
        RichText,
      ]);
      expectTextStrings(widgets, <String>['aaa bbb']);
    });
  });

  group('Line Break', () {
    testWidgets(
      // Example 654 from the GitHub Flavored Markdown specification.
      'two spaces at end of line inside a block element',
      (WidgetTester tester) async {
        const String data = 'line 1 \nline 2';
        await tester.pumpWidget(
          boilerplate(
            const MarkdownBody(data: data),
          ),
        );

        final Iterable<Widget> widgets = selfAndDescendantWidgetsOf(
          find.byType(MarkdownBody),
          tester,
        );
        expectWidgetTypes(
            widgets, <Type>[MarkdownBody, Column, Wrap, Text, RichText]);
        expectTextStrings(widgets, <String>['line 1\nline 2']);
      },
    );

    testWidgets(
      // Example 655 from the GitHub Flavored Markdown specification.
      'backslash at end of line inside a block element',
      (WidgetTester tester) async {
        const String data = 'line 1\\\nline 2';
        await tester.pumpWidget(
          boilerplate(
            const MarkdownBody(data: data),
          ),
        );

        final Iterable<Widget> widgets = selfAndDescendantWidgetsOf(
          find.byType(MarkdownBody),
          tester,
        );
        expectWidgetTypes(
            widgets, <Type>[MarkdownBody, Column, Wrap, Text, RichText]);
        expectTextStrings(widgets, <String>['line 1\nline 2']);
      },
    );

    // NOTE: an exact, byte-for-byte duplicate of this test previously followed
    // it; the copy added no coverage and has been removed.
    testWidgets(
      'non-applicable line break',
      (WidgetTester tester) async {
        const String data = 'line 1.\nline 2.';
        await tester.pumpWidget(
          boilerplate(
            const MarkdownBody(data: data),
          ),
        );

        final Iterable<Widget> widgets = selfAndDescendantWidgetsOf(
          find.byType(MarkdownBody),
          tester,
        );
        expectWidgetTypes(widgets, <Type>[
          MarkdownBody,
          Column,
          Wrap,
          Text,
          RichText,
        ]);
        expectTextStrings(widgets, <String>['line 1. line 2.']);
      },
    );

    testWidgets(
      'soft line break',
      (WidgetTester tester) async {
        const String data = 'line 1.\nline 2.';
        await tester.pumpWidget(
          boilerplate(
            const MarkdownBody(
              data: data,
              softLineBreak: true,
            ),
          ),
        );

        final Iterable<Widget> widgets = selfAndDescendantWidgetsOf(
          find.byType(MarkdownBody),
          tester,
        );
        expectWidgetTypes(
            widgets, <Type>[MarkdownBody, Column, Wrap, Text, RichText]);
        expectTextStrings(widgets, <String>['line 1.\nline 2.']);
      },
    );
  });

  group('Selectable', () {
    testWidgets(
      'header with line of text',
      (WidgetTester tester) async {
        const String data = '# Title\nHello _World_!';
        await tester.pumpWidget(
          boilerplate(
            const MediaQuery(
              data: MediaQueryData(),
              child: Markdown(
                data: data,
                selectable: true,
              ),
            ),
          ),
        );

        // One SelectableText for the header, one for the paragraph.
        expect(find.byType(SelectableText), findsNWidgets(2));
      },
    );

    testWidgets(
      'header with line of text and onTap callback',
      (WidgetTester tester) async {
        const String data = '# Title\nHello _World_!';
        String? textTapResults;

        await tester.pumpWidget(
          boilerplate(
            MediaQuery(
              data: const MediaQueryData(),
              child: Markdown(
                data: data,
                selectable: true,
                onTapText: () => textTapResults = 'Text has been tapped.',
              ),
            ),
          ),
        );

        final Iterable<Widget> selectableWidgets =
            tester.widgetList(find.byType(SelectableText));
        expect(selectableWidgets.length, 2);

        // Both SelectableText widgets share the same onTap callback.
        final SelectableText selectableTitle =
            selectableWidgets.first as SelectableText;
        expect(selectableTitle, isNotNull);
        expect(selectableTitle.onTap, isNotNull);
        selectableTitle.onTap!();
        expect(textTapResults == 'Text has been tapped.', true);

        textTapResults = null;
        final SelectableText selectableText =
            selectableWidgets.last as SelectableText;
        expect(selectableText, isNotNull);
        expect(selectableText.onTap, isNotNull);
        selectableText.onTap!();
        expect(textTapResults == 'Text has been tapped.', true);
      },
    );

    testWidgets(
      'header with line of text and onSelectionChanged callback',
      (WidgetTester tester) async {
        const String data = '# abc def ghi\njkl opq';
        String? selectableText;
        String? selectedText;
        void onSelectionChanged(String? text, TextSelection selection,
            SelectionChangedCause? cause) {
          selectableText = text;
          selectedText = text != null ? selection.textInside(text) : null;
        }

        await tester.pumpWidget(
          MaterialApp(
            home: Material(
              child: MarkdownBody(
                data: data,
                selectable: true,
                onSelectionChanged: onSelectionChanged,
              ),
            ),
          ),
        );

        // Find the positions before character 'd' and 'f'.
        final Offset dPos = positionInRenderedText(tester, 'abc def ghi', 4);
        final Offset fPos = positionInRenderedText(tester, 'abc def ghi', 6);
        // Select from 'd' until 'f'.
        final TestGesture firstGesture =
            await tester.startGesture(dPos, kind: PointerDeviceKind.mouse);
        addTearDown(firstGesture.removePointer);
        await tester.pump();
        await firstGesture.moveTo(fPos);
        await firstGesture.up();
        await tester.pump();
        expect(selectableText, 'abc def ghi');
        expect(selectedText, 'de');

        // Find the positions before character 'j' and 'o'.
        final Offset jPos = positionInRenderedText(tester, 'jkl opq', 0);
        final Offset oPos = positionInRenderedText(tester, 'jkl opq', 4);
        // Select from 'j' until 'o'.
        final TestGesture secondGesture =
            await tester.startGesture(jPos, kind: PointerDeviceKind.mouse);
        addTearDown(secondGesture.removePointer);
        await tester.pump();
        await secondGesture.moveTo(oPos);
        await secondGesture.up();
        await tester.pump();
        expect(selectableText, 'jkl opq');
        expect(selectedText, 'jkl ');
      },
    );
  });

  group('Strikethrough', () {
    testWidgets('single word', (WidgetTester tester) async {
      const String data = '~~strikethrough~~';
      await tester.pumpWidget(
        boilerplate(
          const MarkdownBody(data: data),
        ),
      );

      final Iterable<Widget> widgets = selfAndDescendantWidgetsOf(
        find.byType(MarkdownBody),
        tester,
      );
      expectWidgetTypes(widgets, <Type>[
        MarkdownBody,
        Column,
        Wrap,
        Text,
        RichText,
      ]);
      expectTextStrings(widgets, <String>['strikethrough']);
    });
  });
}
| packages/packages/flutter_markdown/test/text_test.dart/0 | {
"file_path": "packages/packages/flutter_markdown/test/text_test.dart",
"repo_id": "packages",
"token_count": 4997
} | 1,008 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file serves as the single point of entry into the `dart:io` APIs
// within Flutter tools.
//
// In order to make Flutter tools more testable, we use the `FileSystem` APIs
// in `package:file` rather than using the `dart:io` file APIs directly (see
// `file_system.dart`). Doing so allows us to swap out local file system
// access with mockable (or in-memory) file systems, making our tests hermetic
// vis-a-vis file system access.
//
// We also use `package:platform` to provide an abstraction away from the
// static methods in the `dart:io` `Platform` class (see `platform.dart`). As
// such, do not export Platform from this file!
//
// To ensure that all file system and platform API access within Flutter tools
// goes through the proper APIs, we forbid direct imports of `dart:io` (via a
// test), forcing all callers to instead import this file, which exports the
// blessed subset of `dart:io` that is legal to use in Flutter tools.
//
// Because of the nature of this file, it is important that **platform and file
// APIs not be exported from `dart:io` in this file**! Moreover, be careful
// about any additional exports that you add to this file, as doing so will
// increase the API surface that we have to test in Flutter tools, and the APIs
// in `dart:io` can sometimes be hard to use in tests.
// We allow `print()` in this file as a fallback for writing to the terminal via
// regular stdout/stderr/stdio paths. Everything else in the flutter_tools
// library should route terminal I/O through the [Stdio] class defined below.
// ignore_for_file: avoid_print
import 'dart:async';
import 'dart:io' as io
show
IOSink,
Process,
ProcessSignal,
Stdin,
StdinException,
Stdout,
StdoutException,
stderr,
stdin,
stdout;
import 'package:meta/meta.dart';
import 'common.dart';
export 'dart:io'
show
BytesBuilder,
CompressionOptions,
// Directory, NO! Use `file_system.dart`
// File, NO! Use `file_system.dart`
// FileSystemEntity, NO! Use `file_system.dart`
GZipCodec,
HandshakeException,
HttpClient,
HttpClientRequest,
HttpClientResponse,
HttpClientResponseCompressionState,
HttpException,
HttpHeaders,
HttpRequest,
HttpResponse,
HttpServer,
HttpStatus,
IOException,
IOSink,
InternetAddress,
InternetAddressType,
// Link NO! Use `file_system.dart`
// NetworkInterface NO! Use `io.dart`
OSError,
Platform,
Process,
ProcessException,
// ProcessInfo, NO! use `io.dart`
ProcessResult,
// ProcessSignal NO! Use [ProcessSignal] below.
ProcessStartMode,
// RandomAccessFile NO! Use `file_system.dart`
ServerSocket,
SignalException,
Socket,
SocketException,
Stdin,
StdinException,
Stdout,
WebSocket,
WebSocketException,
WebSocketTransformer,
ZLibEncoder,
exitCode,
gzip,
pid,
// stderr, NO! Use `io.dart`
// stdin, NO! Use `io.dart`
// stdout, NO! Use `io.dart`
systemEncoding;
/// A class that wraps stdout, stderr, and stdin, and exposes the allowed
/// operations.
///
/// In particular, there are three ways that writing to stdout and stderr
/// can fail. A call to stdout.write() can fail:
///   * by throwing a regular synchronous exception,
///   * by throwing an exception asynchronously, and
///   * by completing the Future stdout.done with an error.
///
/// This class encapsulates all three so that we don't have to worry about it
/// anywhere else.
class Stdio {
  Stdio();
  /// Tests can provide overrides to use instead of the stdout and stderr from
  /// dart:io.
  @visibleForTesting
  Stdio.test({
    required io.Stdout stdout,
    required io.IOSink stderr,
  })  : _stdoutOverride = stdout,
        _stderrOverride = stderr;
  // Test-injected sinks; when null, the real dart:io sinks are used.
  io.Stdout? _stdoutOverride;
  io.IOSink? _stderrOverride;
  // These flags exist to remember when the done Futures on stdout and stderr
  // complete to avoid trying to write to a closed stream sink, which would
  // generate a [StateError].
  bool _stdoutDone = false;
  bool _stderrDone = false;
  /// The raw byte stream of standard input.
  Stream<List<int>> get stdin => io.stdin;
  /// The stdout sink, initialized lazily on first access.
  ///
  /// On first access a listener is attached to [io.Stdout.done] so that
  /// completion (with or without an error) flips [_stdoutDone] and later
  /// writes fall back instead of throwing.
  io.Stdout get stdout {
    if (_stdout != null) {
      return _stdout!;
    }
    _stdout = _stdoutOverride ?? io.stdout;
    _stdout!.done.then(
      (void _) {
        _stdoutDone = true;
      },
      onError: (Object err, StackTrace st) {
        _stdoutDone = true;
      },
    );
    return _stdout!;
  }
  // Cache for the lazily-initialized stdout sink.
  io.Stdout? _stdout;
  /// The stderr sink, initialized lazily on first access, mirroring [stdout].
  // NOTE(review): this getter is read by [stderrWrite] (production code), so
  // the @visibleForTesting annotation looks misplaced — confirm intent.
  @visibleForTesting
  io.IOSink get stderr {
    if (_stderr != null) {
      return _stderr!;
    }
    _stderr = _stderrOverride ?? io.stderr;
    _stderr!.done.then(
      (void _) {
        _stderrDone = true;
      },
      onError: (Object err, StackTrace st) {
        _stderrDone = true;
      },
    );
    return _stderr!;
  }
  // Cache for the lazily-initialized stderr sink.
  io.IOSink? _stderr;
  /// Whether stdout is attached to a terminal.
  ///
  /// Queries the real dart:io stdout directly, not any test override.
  bool get hasTerminal => io.stdout.hasTerminal;
  // Process-wide cache for [stdinHasTerminal]; the probe below is only run
  // once.
  static bool? _stdinHasTerminal;
  /// Determines whether there is a terminal attached.
  ///
  /// [io.Stdin.hasTerminal] only covers a subset of cases. In this check the
  /// echoMode is toggled on and off to catch cases where the tool running in
  /// a docker container thinks there is an attached terminal. This can cause
  /// runtime errors such as "inappropriate ioctl for device" if not handled.
  bool get stdinHasTerminal {
    if (_stdinHasTerminal != null) {
      return _stdinHasTerminal!;
    }
    if (stdin is! io.Stdin) {
      // A test override replaced stdin, so there is no real terminal.
      return _stdinHasTerminal = false;
    }
    final io.Stdin ioStdin = stdin as io.Stdin;
    if (!ioStdin.hasTerminal) {
      return _stdinHasTerminal = false;
    }
    try {
      // Toggle echoMode and restore it; if the ioctl fails, no real terminal
      // is attached even though hasTerminal reported one.
      final bool currentEchoMode = ioStdin.echoMode;
      ioStdin.echoMode = !currentEchoMode;
      ioStdin.echoMode = currentEchoMode;
    } on io.StdinException {
      return _stdinHasTerminal = false;
    }
    return _stdinHasTerminal = true;
  }
  /// The terminal width in columns, or null when no terminal is attached.
  int? get terminalColumns => hasTerminal ? stdout.terminalColumns : null;
  /// The terminal height in lines, or null when no terminal is attached.
  int? get terminalLines => hasTerminal ? stdout.terminalLines : null;
  /// Whether the attached terminal supports ANSI escape sequences.
  bool get supportsAnsiEscapes => hasTerminal && stdout.supportsAnsiEscapes;
  /// Writes [message] to [stderr], falling back on [fallback] if the write
  /// throws any exception. The default fallback calls [print] on [message].
  void stderrWrite(
    String message, {
    void Function(String, dynamic, StackTrace)? fallback,
  }) {
    if (!_stderrDone) {
      _stdioWrite(stderr, message, fallback: fallback);
      return;
    }
    // stderr's done Future already completed; writing would throw, so go
    // straight to the fallback path.
    fallback == null
        ? print(message)
        : fallback(
            message,
            const io.StdoutException('stderr is done'),
            StackTrace.current,
          );
  }
  /// Writes [message] to [stdout], falling back on [fallback] if the write
  /// throws any exception. The default fallback calls [print] on [message].
  void stdoutWrite(
    String message, {
    void Function(String, dynamic, StackTrace)? fallback,
  }) {
    if (!_stdoutDone) {
      _stdioWrite(stdout, message, fallback: fallback);
      return;
    }
    // stdout's done Future already completed; writing would throw, so go
    // straight to the fallback path.
    fallback == null
        ? print(message)
        : fallback(
            message,
            const io.StdoutException('stdout is done'),
            StackTrace.current,
          );
  }
  // Helper for [stderrWrite] and [stdoutWrite].
  //
  // asyncGuard (from common.dart) routes failures from the write into
  // onError; presumably this covers both synchronous throws and asynchronous
  // errors — confirm against asyncGuard's documentation.
  void _stdioWrite(
    io.IOSink sink,
    String message, {
    void Function(String, dynamic, StackTrace)? fallback,
  }) {
    asyncGuard<void>(() async {
      sink.write(message);
    }, onError: (Object error, StackTrace stackTrace) {
      if (fallback == null) {
        print(message);
      } else {
        fallback(message, error, stackTrace);
      }
    });
  }
  /// Adds [stream] to [stdout].
  Future<void> addStdoutStream(Stream<List<int>> stream) =>
      stdout.addStream(stream);
  /// Adds [stream] to [stderr].
  Future<void> addStderrStream(Stream<List<int>> stream) =>
      stderr.addStream(stream);
}
/// A portable version of [io.ProcessSignal].
///
/// Listening on signals that don't exist on the current platform is just a
/// no-op. This is in contrast to [io.ProcessSignal], where listening to
/// non-existent signals throws an exception.
///
/// This class does NOT implement io.ProcessSignal, because that class uses
/// private fields. This means it cannot be used with, e.g., [Process.killPid].
/// Alternative implementations of the relevant methods that take
/// [ProcessSignal] instances are available on this class (e.g. "send").
class ProcessSignal {
  /// Wraps the given dart:io signal.
  @visibleForTesting
  const ProcessSignal(this._delegate);
  // Signals wrapped in [PosixProcessSignal] so that watching them on Windows
  // is a no-op rather than an error.
  static const ProcessSignal sigwinch =
      PosixProcessSignal(io.ProcessSignal.sigwinch);
  static const ProcessSignal sigterm =
      PosixProcessSignal(io.ProcessSignal.sigterm);
  static const ProcessSignal sigusr1 =
      PosixProcessSignal(io.ProcessSignal.sigusr1);
  static const ProcessSignal sigusr2 =
      PosixProcessSignal(io.ProcessSignal.sigusr2);
  // Signals watched via the plain wrapper on every platform.
  static const ProcessSignal sigint = ProcessSignal(io.ProcessSignal.sigint);
  static const ProcessSignal sigkill = ProcessSignal(io.ProcessSignal.sigkill);
  // The underlying dart:io signal being wrapped.
  final io.ProcessSignal _delegate;
  /// A stream that emits this wrapper each time the underlying dart:io
  /// signal is delivered to the process.
  Stream<ProcessSignal> watch() {
    return _delegate
        .watch()
        .map<ProcessSignal>((io.ProcessSignal signal) => this);
  }
  /// Sends the signal to the given process (identified by pid).
  ///
  /// Returns true if the signal was delivered, false otherwise.
  ///
  /// On Windows, this can only be used with [ProcessSignal.sigterm], which
  /// terminates the process.
  ///
  /// This is implemented by sending the signal using [Process.killPid].
  bool send(int pid) {
    // Debug-mode guard for the Windows restriction documented above.
    assert(!isWindows || this == ProcessSignal.sigterm);
    return io.Process.killPid(pid, _delegate);
  }
  @override
  String toString() => _delegate.toString();
}
/// A [ProcessSignal] that exists only on POSIX platforms.
///
/// On Windows, watching a [PosixProcessSignal] simply yields no events
/// instead of throwing.
@visibleForTesting
class PosixProcessSignal extends ProcessSignal {
  const PosixProcessSignal(super.wrappedSignal);

  @override
  Stream<ProcessSignal> watch() =>
      // Checks the real host platform because the stream is produced directly
      // by dart:io functionality.
      isWindows ? const Stream<ProcessSignal>.empty() : super.watch();
}
| packages/packages/flutter_migrate/lib/src/base/io.dart/0 | {
"file_path": "packages/packages/flutter_migrate/lib/src/base/io.dart",
"repo_id": "packages",
"token_count": 4064
} | 1,009 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'base/file_system.dart';
import 'utils.dart';
/// Data class that holds all results and generated directories from a computeMigration run.
///
/// mergeResults, addedFiles, and deletedFiles includes the sets of files to be migrated while
/// the other members track the temporary sdk and generated app directories created by the tool.
///
/// The compute function does not clean up the temp directories, as the directories may be reused,
/// so this must be done manually afterwards.
class MigrateResult {
  /// Explicitly initializes every member of the MigrateResult.
  MigrateResult({
    required this.mergeResults,
    required this.addedFiles,
    required this.deletedFiles,
    required this.tempDirectories,
    required this.sdkDirs,
    required this.mergeTypeMap,
    required this.diffMap,
    this.generatedBaseTemplateDirectory,
    this.generatedTargetTemplateDirectory,
  });

  /// Creates a MigrateResult whose collections all start out empty.
  MigrateResult.empty()
      : mergeResults = <MergeResult>[],
        addedFiles = <FilePendingMigration>[],
        deletedFiles = <FilePendingMigration>[],
        tempDirectories = <Directory>[],
        mergeTypeMap = <String, MergeType>{},
        diffMap = <String, DiffResult>{},
        sdkDirs = <String, Directory>{};

  /// The results of merging existing files with the target files.
  final List<MergeResult> mergeResults;

  /// Tracks the files that are to be newly added to the project.
  final List<FilePendingMigration> addedFiles;

  /// Tracks the files that are to be deleted from the project.
  final List<FilePendingMigration> deletedFiles;

  /// Tracks the temporary directories created during the migrate compute
  /// process.
  final List<Directory> tempDirectories;

  /// Mapping between the local path of a file and the type of merge that
  /// should be used.
  final Map<String, MergeType> mergeTypeMap;

  /// Mapping between the local path of a file and the diff between the base
  /// and target versions of the file.
  final Map<String, DiffResult> diffMap;

  /// The root directory of the base app.
  Directory? generatedBaseTemplateDirectory;

  /// The root directory of the target app.
  Directory? generatedTargetTemplateDirectory;

  /// The root directories of the Flutter SDK for each revision.
  Map<String, Directory> sdkDirs;
}
/// Defines available merge techniques.
enum MergeType {
  /// A standard three-way merge using the base, local, and target versions
  /// of a file.
  threeWay,

  /// A two way merge that ignores the base version of the file.
  twoWay,

  /// A `CustomMerge` manually handles the merge.
  custom,
}
/// Stores a file that has been marked for migration and metadata about the file.
class FilePendingMigration {
  /// Pairs the file's local path with a handle to the file itself.
  FilePendingMigration(this.localPath, this.file);

  // The local path identifying the file; presumably relative to the project
  // root — TODO(review): confirm against callers.
  String localPath;

  // Handle to the file pending migration.
  File file;
}
| packages/packages/flutter_migrate/lib/src/result.dart/0 | {
"file_path": "packages/packages/flutter_migrate/lib/src/result.dart",
"repo_id": "packages",
"token_count": 830
} | 1,010 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:file/memory.dart';
import 'package:flutter_migrate/src/base/file_system.dart';
import 'package:flutter_migrate/src/base/logger.dart';
import 'package:flutter_migrate/src/manifest.dart';
import 'package:flutter_migrate/src/result.dart';
import 'package:flutter_migrate/src/utils.dart';
import 'src/common.dart';
void main() {
  late FileSystem fileSystem;
  late File manifestFile;
  late BufferLogger logger;

  setUpAll(() {
    // NOTE(review): fixtures are created once for the whole file (setUpAll,
    // not setUp), so fileSystem, logger, and manifestFile are shared across
    // tests. Each test fully rewrites manifestFile, but logger.statusText
    // accumulates across tests — the `contains` expectations below still
    // pass, but consider per-test setUp for stronger isolation.
    fileSystem = MemoryFileSystem.test();
    logger = BufferLogger.test();
    manifestFile = fileSystem.file('.migrate_manifest');
  });

  // Verifies the human-readable status output produced from a manifest.
  group('checkAndPrintMigrateStatus', () {
    testWithoutContext('empty MigrateResult produces empty output', () async {
      final Directory workingDir = fileSystem.directory('migrate_working_dir');
      workingDir.createSync(recursive: true);
      final MigrateManifest manifest = MigrateManifest(
          migrateRootDir: workingDir,
          migrateResult: MigrateResult(
            mergeResults: <MergeResult>[],
            addedFiles: <FilePendingMigration>[],
            deletedFiles: <FilePendingMigration>[],
            mergeTypeMap: <String, MergeType>{},
            diffMap: <String, DiffResult>{},
            tempDirectories: <Directory>[],
            sdkDirs: <String, Directory>{},
          ));
      checkAndPrintMigrateStatus(manifest, workingDir,
          warnConflict: true, logger: logger);
      // An empty result still prints at least a newline.
      expect(logger.statusText, contains('\n'));
    });
    testWithoutContext('populated MigrateResult produces correct output',
        () async {
      final Directory workingDir = fileSystem.directory('migrate_working_dir');
      workingDir.createSync(recursive: true);
      final MigrateManifest manifest = MigrateManifest(
          migrateRootDir: workingDir,
          migrateResult: MigrateResult(
            mergeResults: <MergeResult>[
              StringMergeResult.explicit(
                localPath: 'merged_file',
                mergedString: 'str',
                hasConflict: false,
                exitCode: 0,
              ),
              StringMergeResult.explicit(
                localPath: 'conflict_file',
                mergedString:
                    'hello\nwow a bunch of lines\n<<<<<<<\n=======\n<<<<<<<\nhi\n',
                hasConflict: true,
                exitCode: 1,
              ),
            ],
            addedFiles: <FilePendingMigration>[
              FilePendingMigration('added_file', fileSystem.file('added_file'))
            ],
            deletedFiles: <FilePendingMigration>[
              FilePendingMigration(
                  'deleted_file', fileSystem.file('deleted_file'))
            ],
            // The following are ignored by the manifest.
            mergeTypeMap: <String, MergeType>{'test': MergeType.threeWay},
            diffMap: <String, DiffResult>{},
            tempDirectories: <Directory>[],
            sdkDirs: <String, Directory>{},
          ));
      // Write the conflict markers to disk so the status check sees an
      // unresolved conflict.
      final File conflictFile = workingDir.childFile('conflict_file');
      conflictFile.writeAsStringSync(
          'hello\nwow a bunch of lines\n<<<<<<<\n=======\n<<<<<<<\nhi\n',
          flush: true);
      checkAndPrintMigrateStatus(manifest, workingDir,
          warnConflict: true, logger: logger);
      expect(logger.statusText, contains('''
Added files:
- added_file
Deleted files:
- deleted_file
Modified files:
- conflict_file
- merged_file
'''));
    });
    testWithoutContext('populated MigrateResult detects fixed conflict',
        () async {
      // Same setup as the previous test, except the on-disk conflict file no
      // longer contains conflict markers (the user resolved it by hand).
      final Directory workingDir = fileSystem.directory('migrate_working_dir');
      workingDir.createSync(recursive: true);
      final MigrateManifest manifest = MigrateManifest(
          migrateRootDir: workingDir,
          migrateResult: MigrateResult(
            mergeResults: <MergeResult>[
              StringMergeResult.explicit(
                localPath: 'merged_file',
                mergedString: 'str',
                hasConflict: false,
                exitCode: 0,
              ),
              StringMergeResult.explicit(
                localPath: 'conflict_file',
                mergedString:
                    'hello\nwow a bunch of lines\n<<<<<<<\n=======\n<<<<<<<\nhi\n',
                hasConflict: true,
                exitCode: 1,
              ),
            ],
            addedFiles: <FilePendingMigration>[
              FilePendingMigration('added_file', fileSystem.file('added_file'))
            ],
            deletedFiles: <FilePendingMigration>[
              FilePendingMigration(
                  'deleted_file', fileSystem.file('deleted_file'))
            ],
            // The following are ignored by the manifest.
            mergeTypeMap: <String, MergeType>{'test': MergeType.threeWay},
            diffMap: <String, DiffResult>{},
            tempDirectories: <Directory>[],
            sdkDirs: <String, Directory>{},
          ));
      final File conflictFile = workingDir.childFile('conflict_file');
      conflictFile.writeAsStringSync('hello\nwow a bunch of lines\nhi\n',
          flush: true);
      checkAndPrintMigrateStatus(manifest, workingDir,
          warnConflict: true, logger: logger);
      // NOTE(review): this expectation is identical to the previous test's,
      // and the shared logger already contains that test's output — verify
      // this test actually discriminates the resolved-conflict behavior.
      expect(logger.statusText, contains('''
Added files:
- added_file
Deleted files:
- deleted_file
Modified files:
- conflict_file
- merged_file
'''));
    });
  });

  // Verifies error handling and parsing of the .migrate_manifest Yaml file.
  group('manifest file parsing', () {
    testWithoutContext('empty fails', () async {
      manifestFile.writeAsStringSync('');
      bool exceptionFound = false;
      try {
        MigrateManifest.fromFile(manifestFile);
      } on Exception catch (e) {
        exceptionFound = true;
        expect(e.toString(),
            'Exception: Invalid .migrate_manifest file in the migrate working directory. File is not a Yaml map.');
      }
      expect(exceptionFound, true);
    });
    testWithoutContext('invalid name fails', () async {
      manifestFile.writeAsStringSync('''
merged_files:
conflict_files:
added_filessssss:
deleted_files:
''');
      bool exceptionFound = false;
      try {
        MigrateManifest.fromFile(manifestFile);
      } on Exception catch (e) {
        exceptionFound = true;
        expect(e.toString(),
            'Exception: Invalid .migrate_manifest file in the migrate working directory. File is missing an entry.');
      }
      expect(exceptionFound, true);
    });
    testWithoutContext('missing name fails', () async {
      manifestFile.writeAsStringSync('''
merged_files:
conflict_files:
deleted_files:
''');
      bool exceptionFound = false;
      try {
        MigrateManifest.fromFile(manifestFile);
      } on Exception catch (e) {
        exceptionFound = true;
        expect(e.toString(),
            'Exception: Invalid .migrate_manifest file in the migrate working directory. File is missing an entry.');
      }
      expect(exceptionFound, true);
    });
    testWithoutContext('wrong entry type fails', () async {
      manifestFile.writeAsStringSync('''
merged_files:
conflict_files:
other_key:
added_files:
deleted_files:
''');
      bool exceptionFound = false;
      try {
        MigrateManifest.fromFile(manifestFile);
      } on Exception catch (e) {
        exceptionFound = true;
        expect(e.toString(),
            'Exception: Invalid .migrate_manifest file in the migrate working directory. Entry is not a Yaml list.');
      }
      expect(exceptionFound, true);
    });
    testWithoutContext('unpopulated succeeds', () async {
      manifestFile.writeAsStringSync('''
merged_files:
conflict_files:
added_files:
deleted_files:
''');
      final MigrateManifest manifest = MigrateManifest.fromFile(manifestFile);
      expect(manifest.mergedFiles.isEmpty, true);
      expect(manifest.conflictFiles.isEmpty, true);
      expect(manifest.addedFiles.isEmpty, true);
      expect(manifest.deletedFiles.isEmpty, true);
    });
    testWithoutContext('order does not matter', () async {
      manifestFile.writeAsStringSync('''
added_files:
merged_files:
deleted_files:
conflict_files:
''');
      final MigrateManifest manifest = MigrateManifest.fromFile(manifestFile);
      expect(manifest.mergedFiles.isEmpty, true);
      expect(manifest.conflictFiles.isEmpty, true);
      expect(manifest.addedFiles.isEmpty, true);
      expect(manifest.deletedFiles.isEmpty, true);
    });
    testWithoutContext('basic succeeds', () async {
      manifestFile.writeAsStringSync('''
merged_files:
- file1
conflict_files:
- file2
added_files:
- file3
deleted_files:
- file4
''');
      final MigrateManifest manifest = MigrateManifest.fromFile(manifestFile);
      expect(manifest.mergedFiles.isEmpty, false);
      expect(manifest.conflictFiles.isEmpty, false);
      expect(manifest.addedFiles.isEmpty, false);
      expect(manifest.deletedFiles.isEmpty, false);
      expect(manifest.mergedFiles.length, 1);
      expect(manifest.conflictFiles.length, 1);
      expect(manifest.addedFiles.length, 1);
      expect(manifest.deletedFiles.length, 1);
      expect(manifest.mergedFiles[0], 'file1');
      expect(manifest.conflictFiles[0], 'file2');
      expect(manifest.addedFiles[0], 'file3');
      expect(manifest.deletedFiles[0], 'file4');
    });
    testWithoutContext('basic multi-list succeeds', () async {
      manifestFile.writeAsStringSync('''
merged_files:
- file1
- file2
conflict_files:
added_files:
deleted_files:
- file3
- file4
''');
      final MigrateManifest manifest = MigrateManifest.fromFile(manifestFile);
      expect(manifest.mergedFiles.isEmpty, false);
      expect(manifest.conflictFiles.isEmpty, true);
      expect(manifest.addedFiles.isEmpty, true);
      expect(manifest.deletedFiles.isEmpty, false);
      expect(manifest.mergedFiles.length, 2);
      expect(manifest.conflictFiles.length, 0);
      expect(manifest.addedFiles.length, 0);
      expect(manifest.deletedFiles.length, 2);
      expect(manifest.mergedFiles[0], 'file1');
      expect(manifest.mergedFiles[1], 'file2');
      expect(manifest.deletedFiles[0], 'file3');
      expect(manifest.deletedFiles[1], 'file4');
    });
  });

  // Verifies that a MigrateManifest built from a MigrateResult exposes the
  // expected file lists (merge type / diff / temp-dir data is ignored).
  group('manifest MigrateResult creation', () {
    testWithoutContext('empty MigrateResult', () async {
      final MigrateManifest manifest = MigrateManifest(
          migrateRootDir: fileSystem.directory('root'),
          migrateResult: MigrateResult(
            mergeResults: <MergeResult>[],
            addedFiles: <FilePendingMigration>[],
            deletedFiles: <FilePendingMigration>[],
            mergeTypeMap: <String, MergeType>{},
            diffMap: <String, DiffResult>{},
            tempDirectories: <Directory>[],
            sdkDirs: <String, Directory>{},
          ));
      expect(manifest.mergedFiles.isEmpty, true);
      expect(manifest.conflictFiles.isEmpty, true);
      expect(manifest.addedFiles.isEmpty, true);
      expect(manifest.deletedFiles.isEmpty, true);
    });
    testWithoutContext('simple MigrateResult', () async {
      final MigrateManifest manifest = MigrateManifest(
          migrateRootDir: fileSystem.directory('root'),
          migrateResult: MigrateResult(
            mergeResults: <MergeResult>[
              StringMergeResult.explicit(
                localPath: 'merged_file',
                mergedString: 'str',
                hasConflict: false,
                exitCode: 0,
              ),
              StringMergeResult.explicit(
                localPath: 'conflict_file',
                mergedString: '<<<<<<<<<<<',
                hasConflict: true,
                exitCode: 1,
              ),
            ],
            addedFiles: <FilePendingMigration>[
              FilePendingMigration('added_file', fileSystem.file('added_file'))
            ],
            deletedFiles: <FilePendingMigration>[
              FilePendingMigration(
                  'deleted_file', fileSystem.file('deleted_file'))
            ],
            // The following are ignored by the manifest.
            mergeTypeMap: <String, MergeType>{'test': MergeType.threeWay},
            diffMap: <String, DiffResult>{},
            tempDirectories: <Directory>[],
            sdkDirs: <String, Directory>{},
          ));
      expect(manifest.mergedFiles.isEmpty, false);
      expect(manifest.conflictFiles.isEmpty, false);
      expect(manifest.addedFiles.isEmpty, false);
      expect(manifest.deletedFiles.isEmpty, false);
      expect(manifest.mergedFiles.length, 1);
      expect(manifest.conflictFiles.length, 1);
      expect(manifest.addedFiles.length, 1);
      expect(manifest.deletedFiles.length, 1);
      expect(manifest.mergedFiles[0], 'merged_file');
      expect(manifest.conflictFiles[0], 'conflict_file');
      expect(manifest.addedFiles[0], 'added_file');
      expect(manifest.deletedFiles[0], 'deleted_file');
    });
  });

  // Verifies round-tripping: parse a manifest file, write it back out, and
  // check the serialized form matches the input.
  group('manifest write', () {
    testWithoutContext('empty', () async {
      manifestFile.writeAsStringSync('''
merged_files:
conflict_files:
added_files:
deleted_files:
''');
      final MigrateManifest manifest = MigrateManifest.fromFile(manifestFile);
      expect(manifest.mergedFiles.isEmpty, true);
      expect(manifest.conflictFiles.isEmpty, true);
      expect(manifest.addedFiles.isEmpty, true);
      expect(manifest.deletedFiles.isEmpty, true);
      manifest.writeFile();
      expect(manifestFile.readAsStringSync(), '''
merged_files:
conflict_files:
added_files:
deleted_files:
''');
    });
    testWithoutContext('basic multi-list', () async {
      manifestFile.writeAsStringSync('''
merged_files:
- file1
- file2
conflict_files:
added_files:
deleted_files:
- file3
- file4
''');
      final MigrateManifest manifest = MigrateManifest.fromFile(manifestFile);
      expect(manifest.mergedFiles.isEmpty, false);
      expect(manifest.conflictFiles.isEmpty, true);
      expect(manifest.addedFiles.isEmpty, true);
      expect(manifest.deletedFiles.isEmpty, false);
      expect(manifest.mergedFiles.length, 2);
      expect(manifest.conflictFiles.length, 0);
      expect(manifest.addedFiles.length, 0);
      expect(manifest.deletedFiles.length, 2);
      expect(manifest.mergedFiles[0], 'file1');
      expect(manifest.mergedFiles[1], 'file2');
      expect(manifest.deletedFiles[0], 'file3');
      expect(manifest.deletedFiles[1], 'file4');
      manifest.writeFile();
      expect(manifestFile.readAsStringSync(), '''
merged_files:
- file1
- file2
conflict_files:
added_files:
deleted_files:
- file3
- file4
''');
    });
  });
}
| packages/packages/flutter_migrate/test/manifest_test.dart/0 | {
"file_path": "packages/packages/flutter_migrate/test/manifest_test.dart",
"repo_id": "packages",
"token_count": 6562
} | 1,011 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.