chore: Move basic functionality of Digital Ocean to provider layer
parent cd59c19c9c
commit 76536f8115
4 changed files with 308 additions and 223 deletions
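The bulk of the change follows one pattern: the Digital Ocean REST class (DigitalOceanApi) keeps only the HTTP calls and returns raw JSON wrapped in GenericResult, while DigitalOceanServerProvider interprets that JSON and builds the domain models (ServerBasicInfo, ServerMetadataEntity, ServerMetrics). A rough sketch of that split is shown below; every class here is a hypothetical, heavily reduced shape inferred from the diff, not the project's real definitions.

// Illustrative sketch only: names and fields are reduced to what is visible in this diff.
class GenericResult<T> {
  GenericResult({
    required this.success,
    required this.data,
    this.code,
    this.message,
  });
  final bool success;
  final T data;
  final int? code;
  final String? message;
}

class ServerBasicInfo {
  ServerBasicInfo({required this.id, required this.name});
  final int id;
  final String name;
}

/// REST layer: performs the HTTP call and returns the raw droplet JSON.
class DigitalOceanApiSketch {
  Future<GenericResult<List>> getServers() async {
    try {
      final List rawDroplets = []; // would be filled from GET /droplets
      return GenericResult(success: true, data: rawDroplets);
    } catch (e) {
      return GenericResult(success: false, data: [], message: e.toString());
    }
  }
}

/// Provider layer: interprets the raw JSON and builds domain objects.
class DigitalOceanProviderSketch {
  DigitalOceanProviderSketch(this.api);
  final DigitalOceanApiSketch api;

  Future<GenericResult<List<ServerBasicInfo>>> getServers() async {
    final result = await api.getServers();
    if (!result.success) {
      return GenericResult(success: false, data: [], message: result.message);
    }
    final servers = result.data
        .map<ServerBasicInfo>(
          (final raw) => ServerBasicInfo(id: raw['id'], name: raw['name']),
        )
        .toList();
    return GenericResult(success: true, data: servers);
  }
}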
@@ -2,7 +2,6 @@ import 'dart:convert';
 import 'dart:io';
 
 import 'package:dio/dio.dart';
-import 'package:easy_localization/easy_localization.dart';
 import 'package:selfprivacy/config/get_it_config.dart';
 import 'package:selfprivacy/logic/api_maps/rest_maps/server_providers/volume_provider.dart';
 import 'package:selfprivacy/logic/api_maps/rest_maps/server_providers/server_provider.dart';
@@ -11,13 +10,9 @@ import 'package:selfprivacy/logic/models/disk_size.dart';
 import 'package:selfprivacy/logic/models/hive/server_domain.dart';
 import 'package:selfprivacy/logic/models/hive/server_details.dart';
 import 'package:selfprivacy/logic/models/hive/user.dart';
-import 'package:selfprivacy/logic/models/metrics.dart';
 import 'package:selfprivacy/logic/models/price.dart';
 import 'package:selfprivacy/logic/models/server_basic_info.dart';
-import 'package:selfprivacy/logic/models/server_metadata.dart';
 import 'package:selfprivacy/logic/models/server_provider_location.dart';
-import 'package:selfprivacy/logic/models/server_type.dart';
-import 'package:selfprivacy/utils/extensions/string_extensions.dart';
 import 'package:selfprivacy/utils/network_utils.dart';
 import 'package:selfprivacy/utils/password_generator.dart';
 
@@ -107,13 +102,11 @@ class DigitalOceanApi extends ServerProviderApi with VolumeProviderApi {
 
   /// Hardcoded on their documentation and there is no pricing API at all
   /// Probably we should scrap the doc page manually
-  @override
   Future<Price?> getPricePerGb() async => Price(
         value: 0.10,
         currency: 'USD',
       );
 
-  @override
   Future<GenericResult<ServerVolume?>> createVolume() async {
     ServerVolume? volume;
 
@@ -163,7 +156,6 @@ class DigitalOceanApi extends ServerProviderApi with VolumeProviderApi {
     );
   }
 
-  @override
   Future<List<ServerVolume>> getVolumes({final String? status}) async {
     final List<ServerVolume> volumes = [];
 
@@ -216,7 +208,6 @@ class DigitalOceanApi extends ServerProviderApi with VolumeProviderApi {
     return requestedVolume;
   }
 
-  @override
   Future<void> deleteVolume(final ServerVolume volume) async {
     final Dio client = await getClient();
     try {
@@ -228,7 +219,6 @@ class DigitalOceanApi extends ServerProviderApi with VolumeProviderApi {
     }
   }
 
-  @override
   Future<GenericResult<bool>> attachVolume(
     final ServerVolume volume,
     final int serverId,
@@ -268,7 +258,6 @@ class DigitalOceanApi extends ServerProviderApi with VolumeProviderApi {
     );
   }
 
-  @override
   Future<bool> detachVolume(final ServerVolume volume) async {
     bool success = false;
 
@@ -295,7 +284,6 @@ class DigitalOceanApi extends ServerProviderApi with VolumeProviderApi {
     return success;
   }
 
-  @override
   Future<bool> resizeVolume(
     final ServerVolume volume,
     final DiskSize size,
@@ -325,7 +313,6 @@ class DigitalOceanApi extends ServerProviderApi with VolumeProviderApi {
     return success;
   }
 
-  @override
   Future<GenericResult<ServerHostingDetails?>> createServer({
     required final String dnsApiToken,
     required final User rootUser,
@@ -472,132 +459,65 @@ class DigitalOceanApi extends ServerProviderApi with VolumeProviderApi {
       close(client);
     }
 
-    return GenericResult(
-      success: true,
-      data: true,
-    );
+    return GenericResult(success: true, data: true);
   }
 
-  @override
-  Future<ServerHostingDetails> restart() async {
-    final ServerHostingDetails server = getIt<ApiConfigModel>().serverDetails!;
-
+  Future<GenericResult<void>> restart(final int serverId) async {
     final Dio client = await getClient();
     try {
       await client.post(
-        '/droplets/${server.id}/actions',
+        '/droplets/$serverId/actions',
         data: {
          'type': 'reboot',
        },
      );
    } catch (e) {
      print(e);
+      return GenericResult(
+        success: false,
+        data: null,
+        message: e.toString(),
+      );
    } finally {
      close(client);
    }
 
-    return server.copyWith(startTime: DateTime.now());
+    return GenericResult(success: true, data: null);
  }
 
-  @override
-  Future<ServerHostingDetails> powerOn() async {
-    final ServerHostingDetails server = getIt<ApiConfigModel>().serverDetails!;
-
+  Future<GenericResult<void>> powerOn(final int serverId) async {
    final Dio client = await getClient();
    try {
      await client.post(
-        '/droplets/${server.id}/actions',
+        '/droplets/$serverId/actions',
        data: {
          'type': 'power_on',
        },
      );
    } catch (e) {
      print(e);
+      return GenericResult(
+        success: false,
+        data: null,
+        message: e.toString(),
+      );
    } finally {
      close(client);
    }
 
-    return server.copyWith(startTime: DateTime.now());
+    return GenericResult(success: true, data: null);
  }
 
-  /// Digital Ocean returns a map of lists of /proc/stat values,
-  /// so here we are trying to implement average CPU
-  /// load calculation for each point in time on a given interval.
-  ///
-  /// For each point of time:
-  ///
-  /// `Average Load = 100 * (1 - (Idle Load / Total Load))`
-  ///
-  /// For more info please proceed to read:
-  /// https://rosettacode.org/wiki/Linux_CPU_utilization
-  List<TimeSeriesData> calculateCpuLoadMetrics(final List rawProcStatMetrics) {
-    final List<TimeSeriesData> cpuLoads = [];
-
-    final int pointsInTime = (rawProcStatMetrics[0]['values'] as List).length;
-    for (int i = 0; i < pointsInTime; ++i) {
-      double currentMetricLoad = 0.0;
-      double? currentMetricIdle;
-      for (final rawProcStat in rawProcStatMetrics) {
-        final String rawProcValue = rawProcStat['values'][i][1];
-        // Converting MBit into bit
-        final double procValue = double.parse(rawProcValue) * 1000000;
-        currentMetricLoad += procValue;
-        if (currentMetricIdle == null &&
-            rawProcStat['metric']['mode'] == 'idle') {
-          currentMetricIdle = procValue;
-        }
-      }
-      currentMetricIdle ??= 0.0;
-      currentMetricLoad = 100.0 * (1 - (currentMetricIdle / currentMetricLoad));
-      cpuLoads.add(
-        TimeSeriesData(
-          rawProcStatMetrics[0]['values'][i][0],
-          currentMetricLoad,
-        ),
-      );
-    }
-
-    return cpuLoads;
-  }
-
-  @override
-  Future<ServerMetrics?> getMetrics(
+  Future<GenericResult<List>> getMetricsCpu(
    final int serverId,
    final DateTime start,
    final DateTime end,
  ) async {
-    ServerMetrics? metrics;
+    List metrics = [];
 
-    const int step = 15;
    final Dio client = await getClient();
    try {
-      Response response = await client.get(
-        '/monitoring/metrics/droplet/bandwidth',
-        queryParameters: {
-          'start': '${(start.microsecondsSinceEpoch / 1000000).round()}',
-          'end': '${(end.microsecondsSinceEpoch / 1000000).round()}',
-          'host_id': '$serverId',
-          'interface': 'public',
-          'direction': 'inbound',
-        },
-      );
-
-      final List inbound = response.data['data']['result'][0]['values'];
-
-      response = await client.get(
-        '/monitoring/metrics/droplet/bandwidth',
-        queryParameters: {
-          'start': '${(start.microsecondsSinceEpoch / 1000000).round()}',
-          'end': '${(end.microsecondsSinceEpoch / 1000000).round()}',
-          'host_id': '$serverId',
-          'interface': 'public',
-          'direction': 'outbound',
-        },
-      );
-
-      final List outbound = response.data['data']['result'][0]['values'];
-
-      response = await client.get(
+      final Response response = await client.get(
        '/monitoring/metrics/droplet/cpu',
        queryParameters: {
          'start': '${(start.microsecondsSinceEpoch / 1000000).round()}',
@@ -605,122 +525,75 @@ class DigitalOceanApi extends ServerProviderApi with VolumeProviderApi {
          'host_id': '$serverId',
        },
      );
-      metrics = ServerMetrics(
-        bandwidthIn: inbound
-            .map(
-              (final el) => TimeSeriesData(el[0], double.parse(el[1]) * 100000),
-            )
-            .toList(),
-        bandwidthOut: outbound
-            .map(
-              (final el) => TimeSeriesData(el[0], double.parse(el[1]) * 100000),
-            )
-            .toList(),
-        cpu: calculateCpuLoadMetrics(response.data['data']['result']),
-        start: start,
-        end: end,
-        stepsInSecond: step,
-      );
+      metrics = response.data['data']['result'];
    } catch (e) {
      print(e);
+      return GenericResult(
+        success: false,
+        data: [],
+        message: e.toString(),
+      );
    } finally {
      close(client);
    }
 
-    return metrics;
+    return GenericResult(success: true, data: metrics);
  }
 
-  @override
-  Future<List<ServerMetadataEntity>> getMetadata(final int serverId) async {
-    List<ServerMetadataEntity> metadata = [];
+  Future<GenericResult<List>> getMetricsBandwidth(
+    final int serverId,
+    final DateTime start,
+    final DateTime end,
+    final bool isInbound,
+  ) async {
+    List metrics = [];
 
    final Dio client = await getClient();
    try {
-      final Response response = await client.get('/droplets/$serverId');
-      final droplet = response.data!['droplet'];
-      metadata = [
-        ServerMetadataEntity(
-          type: MetadataType.id,
-          name: 'server.server_id'.tr(),
-          value: droplet['id'].toString(),
-        ),
-        ServerMetadataEntity(
-          type: MetadataType.status,
-          name: 'server.status'.tr(),
-          value: droplet['status'].toString().capitalize(),
-        ),
-        ServerMetadataEntity(
-          type: MetadataType.cpu,
-          name: 'server.cpu'.tr(),
-          value: 'server.core_count'.plural(droplet['vcpus']),
-        ),
-        ServerMetadataEntity(
-          type: MetadataType.ram,
-          name: 'server.ram'.tr(),
-          value: "${droplet['memory'].toString()} MB",
-        ),
-        ServerMetadataEntity(
-          type: MetadataType.cost,
-          name: 'server.monthly_cost'.tr(),
-          value: droplet['size']['price_monthly'].toString(),
-        ),
-        ServerMetadataEntity(
-          type: MetadataType.location,
-          name: 'server.location'.tr(),
-          value:
-              '${droplet['region']['name']} ${getEmojiFlag(droplet['region']['slug'].toString()) ?? ''}',
-        ),
-        ServerMetadataEntity(
-          type: MetadataType.other,
-          name: 'server.provider'.tr(),
-          value: displayProviderName,
-        ),
-      ];
+      final Response response = await client.get(
+        '/monitoring/metrics/droplet/bandwidth',
+        queryParameters: {
+          'start': '${(start.microsecondsSinceEpoch / 1000000).round()}',
+          'end': '${(end.microsecondsSinceEpoch / 1000000).round()}',
+          'host_id': '$serverId',
+          'interface': 'public',
+          'direction': isInbound ? 'inbound' : 'outbound',
+        },
+      );
+      metrics = response.data['data']['result'][0]['values'];
    } catch (e) {
      print(e);
+      return GenericResult(
+        success: false,
+        data: [],
+        message: e.toString(),
+      );
    } finally {
      close(client);
    }
 
-    return metadata;
+    return GenericResult(success: true, data: metrics);
  }
 
-  @override
-  Future<List<ServerBasicInfo>> getServers() async {
-    List<ServerBasicInfo> servers = [];
+  Future<GenericResult<List>> getServers() async {
+    List servers = [];
 
    final Dio client = await getClient();
    try {
      final Response response = await client.get('/droplets');
-      servers = response.data!['droplets'].map<ServerBasicInfo>(
-        (final server) {
-          String ipv4 = '0.0.0.0';
-          if (server['networks']['v4'].isNotEmpty) {
-            for (final v4 in server['networks']['v4']) {
-              if (v4['type'].toString() == 'public') {
-                ipv4 = v4['ip_address'].toString();
-              }
-            }
-          }
-
-          return ServerBasicInfo(
-            id: server['id'],
-            reverseDns: server['name'],
-            created: DateTime.now(),
-            ip: ipv4,
-            name: server['name'],
-          );
-        },
-      ).toList();
+      servers = response.data;
    } catch (e) {
      print(e);
+      return GenericResult(
+        success: false,
+        data: servers,
+        message: e.toString(),
+      );
    } finally {
      close(client);
    }
 
-    print(servers);
-    return servers;
+    return GenericResult(success: true, data: servers);
  }
 
  Future<GenericResult<List>> getAvailableLocations() async {
@@ -769,21 +642,4 @@ class DigitalOceanApi extends ServerProviderApi with VolumeProviderApi {
 
    return GenericResult(data: types, success: true);
  }
-
-  @override
-  Future<GenericResult<void>> createReverseDns({
-    required final ServerHostingDetails serverDetails,
-    required final ServerDomain domain,
-  }) async {
-    /// TODO remove from provider interface
-    const bool success = true;
-    return GenericResult(success: success, data: null);
-  }
-
-  @override
-  ProviderApiTokenValidation getApiTokenValidation() =>
-      ProviderApiTokenValidation(
-        regexp: RegExp(r'\s+|[-!$%^&*()@+|~=`{}\[\]:<>?,.\/]'),
-        length: 71,
-      );
 }
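One detail in the metrics methods of this file: start and end are converted with (start.microsecondsSinceEpoch / 1000000).round() before being sent, i.e. DateTime values are turned into Unix epoch seconds, presumably because the /monitoring/metrics/droplet/* endpoints take their start/end parameters in that form. A quick standalone check of the conversion (illustrative snippet, not part of the commit):

void main() {
  final DateTime start = DateTime.utc(2023, 1, 1);
  // Same conversion as used by getMetricsCpu / getMetricsBandwidth above.
  final int unixSeconds = (start.microsecondsSinceEpoch / 1000000).round();
  print(unixSeconds); // 1672531200, i.e. 2023-01-01T00:00:00Z in seconds
}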
@@ -455,10 +455,7 @@ class HetznerApi extends ServerProviderApi with VolumeProviderApi {
       close(client);
     }
 
-    return GenericResult(
-      success: true,
-      data: null,
-    );
+    return GenericResult(success: true, data: null);
   }
 
   Future<GenericResult<void>> powerOn(final int serverId) async {
@@ -476,10 +473,7 @@ class HetznerApi extends ServerProviderApi with VolumeProviderApi {
       close(client);
     }
 
-    return GenericResult(
-      success: true,
-      data: null,
-    );
+    return GenericResult(success: true, data: null);
   }
 
   Future<GenericResult<Map<String, dynamic>>> getMetrics(
@@ -1,7 +1,13 @@
 import 'package:selfprivacy/logic/api_maps/rest_maps/server_providers/digital_ocean/digital_ocean_api.dart';
+import 'package:selfprivacy/logic/models/disk_size.dart';
+import 'package:selfprivacy/logic/models/metrics.dart';
+import 'package:selfprivacy/logic/models/price.dart';
+import 'package:selfprivacy/logic/models/server_basic_info.dart';
+import 'package:selfprivacy/logic/models/server_metadata.dart';
 import 'package:selfprivacy/logic/models/server_provider_location.dart';
 import 'package:selfprivacy/logic/models/server_type.dart';
 import 'package:selfprivacy/logic/providers/server_provider.dart';
+import 'package:selfprivacy/utils/extensions/string_extensions.dart';
 
 class ApiAdapter {
   ApiAdapter({final String? region, final bool isWithToken = true})
@@ -149,7 +155,7 @@ class DigitalOceanServerProvider extends ServerProvider {
       );
     }
 
-    final List rawTypes = result.data;
+    final List rawSizes = result.data;
     for (final rawSize in rawSizes) {
       for (final rawRegion in rawSize['regions']) {
         final ramMb = rawSize['memory'].toDouble();
@@ -174,4 +180,243 @@ class DigitalOceanServerProvider extends ServerProvider {
 
     return GenericResult(success: true, data: types);
   }
+
+  Future<GenericResult<List<ServerBasicInfo>>> getServers() async {
+    final List<ServerBasicInfo> servers = [];
+    final result = await _adapter.api().getServers();
+    if (result.data.isEmpty || !result.success) {
+      return GenericResult(
+        success: result.success,
+        data: servers,
+        code: result.code,
+        message: result.message,
+      );
+    }
+
+    final List rawServers = result.data;
+    rawServers.map<ServerBasicInfo>(
+      (final server) {
+        String ipv4 = '0.0.0.0';
+        if (server['networks']['v4'].isNotEmpty) {
+          for (final v4 in server['networks']['v4']) {
+            if (v4['type'].toString() == 'public') {
+              ipv4 = v4['ip_address'].toString();
+            }
+          }
+        }
+
+        return ServerBasicInfo(
+          id: server['id'],
+          reverseDns: server['name'],
+          created: DateTime.now(),
+          ip: ipv4,
+          name: server['name'],
+        );
+      },
+    ).toList();
+
+    return GenericResult(success: true, data: servers);
+  }
+
+  Future<GenericResult<List<ServerMetadataEntity>>> getMetadata(
+    final int serverId,
+  ) async {
+    List<ServerMetadataEntity> metadata = [];
+    final result = await _adapter.api().getServers();
+    if (result.data.isEmpty || !result.success) {
+      return GenericResult(
+        success: false,
+        data: metadata,
+        code: result.code,
+        message: result.message,
+      );
+    }
+
+    final List servers = result.data;
+    try {
+      final droplet = servers.firstWhere(
+        (final server) => server['id'] == serverId,
+      );
+
+      metadata = [
+        ServerMetadataEntity(
+          type: MetadataType.id,
+          trId: 'server.server_id',
+          value: droplet['id'].toString(),
+        ),
+        ServerMetadataEntity(
+          type: MetadataType.status,
+          trId: 'server.status',
+          value: droplet['status'].toString().capitalize(),
+        ),
+        ServerMetadataEntity(
+          type: MetadataType.cpu,
+          trId: 'server.cpu',
+          value: droplet['vcpus'].toString(),
+        ),
+        ServerMetadataEntity(
+          type: MetadataType.ram,
+          trId: 'server.ram',
+          value: "${droplet['memory'].toString()} MB",
+        ),
+        ServerMetadataEntity(
+          type: MetadataType.cost,
+          trId: 'server.monthly_cost',
+          value: droplet['size']['price_monthly'].toString(),
+        ),
+        ServerMetadataEntity(
+          type: MetadataType.location,
+          trId: 'server.location',
+          value:
+              '${droplet['region']['name']} ${getEmojiFlag(droplet['region']['slug'].toString()) ?? ''}',
+        ),
+        ServerMetadataEntity(
+          type: MetadataType.other,
+          trId: 'server.provider',
+          value: _adapter.api().displayProviderName,
+        ),
+      ];
+    } catch (e) {
+      return GenericResult(
+        success: false,
+        data: [],
+        message: e.toString(),
+      );
+    }
+
+    return GenericResult(success: true, data: metadata);
+  }
+
+  /// Digital Ocean returns a map of lists of /proc/stat values,
+  /// so here we are trying to implement average CPU
+  /// load calculation for each point in time on a given interval.
+  ///
+  /// For each point of time:
+  ///
+  /// `Average Load = 100 * (1 - (Idle Load / Total Load))`
+  ///
+  /// For more info please proceed to read:
+  /// https://rosettacode.org/wiki/Linux_CPU_utilization
+  List<TimeSeriesData> calculateCpuLoadMetrics(final List rawProcStatMetrics) {
+    final List<TimeSeriesData> cpuLoads = [];
+
+    final int pointsInTime = (rawProcStatMetrics[0]['values'] as List).length;
+    for (int i = 0; i < pointsInTime; ++i) {
+      double currentMetricLoad = 0.0;
+      double? currentMetricIdle;
+      for (final rawProcStat in rawProcStatMetrics) {
+        final String rawProcValue = rawProcStat['values'][i][1];
+        // Converting MBit into bit
+        final double procValue = double.parse(rawProcValue) * 1000000;
+        currentMetricLoad += procValue;
+        if (currentMetricIdle == null &&
+            rawProcStat['metric']['mode'] == 'idle') {
+          currentMetricIdle = procValue;
+        }
+      }
+      currentMetricIdle ??= 0.0;
+      currentMetricLoad = 100.0 * (1 - (currentMetricIdle / currentMetricLoad));
+      cpuLoads.add(
+        TimeSeriesData(
+          rawProcStatMetrics[0]['values'][i][0],
+          currentMetricLoad,
+        ),
+      );
+    }
+
+    return cpuLoads;
+  }
+
+  @override
+  Future<GenericResult<ServerMetrics?>> getMetrics(
+    final int serverId,
+    final DateTime start,
+    final DateTime end,
+  ) async {
+    ServerMetrics? metrics;
+
+    const int step = 15;
+    final inboundResult = await _adapter.api().getMetricsBandwidth(
+      serverId,
+      start,
+      end,
+      true,
+    );
+
+    if (inboundResult.data.isEmpty || !inboundResult.success) {
+      return GenericResult(
+        success: false,
+        data: null,
+        code: inboundResult.code,
+        message: inboundResult.message,
+      );
+    }
+
+    final outboundResult = await _adapter.api().getMetricsBandwidth(
+      serverId,
+      start,
+      end,
+      false,
+    );
+
+    if (outboundResult.data.isEmpty || !outboundResult.success) {
+      return GenericResult(
+        success: false,
+        data: null,
+        code: outboundResult.code,
+        message: outboundResult.message,
+      );
+    }
+
+    final cpuResult = await _adapter.api().getMetricsCpu(serverId, start, end);
+
+    if (cpuResult.data.isEmpty || !cpuResult.success) {
+      return GenericResult(
+        success: false,
+        data: null,
+        code: cpuResult.code,
+        message: cpuResult.message,
+      );
+    }
+
+    metrics = ServerMetrics(
+      bandwidthIn: inboundResult.data
+          .map(
+            (final el) => TimeSeriesData(el[0], double.parse(el[1]) * 100000),
+          )
+          .toList(),
+      bandwidthOut: outboundResult.data
+          .map(
+            (final el) => TimeSeriesData(el[0], double.parse(el[1]) * 100000),
+          )
+          .toList(),
+      cpu: calculateCpuLoadMetrics(cpuResult.data),
+      start: start,
+      end: end,
+      stepsInSecond: step,
+    );
+
+    return GenericResult(success: true, data: metrics);
+  }
+
+  @override
+  Future<GenericResult<DateTime?>> restart(final int serverId) async {
+    DateTime? timestamp;
+    final result = await _adapter.api().restart(serverId);
+    if (!result.success) {
+      return GenericResult(
+        success: false,
+        data: timestamp,
+        code: result.code,
+        message: result.message,
+      );
+    }
+
+    timestamp = DateTime.now();
+
+    return GenericResult(
+      success: true,
+      data: timestamp,
+    );
+  }
 }
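The calculateCpuLoadMetrics helper added to the provider applies the documented formula Average Load = 100 * (1 - (Idle Load / Total Load)) at each timestamp, where Total Load is the sum of all /proc/stat mode counters and Idle Load is the counter whose mode is 'idle'. A worked example with made-up counter values (not real API output):

void main() {
  // Hypothetical counters for a single point in time.
  const double idle = 300.0; // the 'idle' mode counter
  const double total = 400.0; // sum of all mode counters, idle included
  final double load = 100.0 * (1 - (idle / total));
  print(load); // 25.0 -> the CPU was busy about 25% of that interval
}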
@@ -173,16 +173,6 @@ class HetznerServerProvider extends ServerProvider {
     return GenericResult(success: true, data: types);
   }
 
-  Future<GenericResult<void>> createReverseDns({
-    required final ServerHostingDetails serverDetails,
-    required final ServerDomain domain,
-  }) async =>
-      _adapter.api().createReverseDns(
-        serverId: serverDetails.id,
-        ip4: serverDetails.ip4,
-        dnsPtr: domain.domainName,
-      );
-
   Future<GenericResult<List<ServerBasicInfo>>> getServers() async {
     final List<ServerBasicInfo> servers = [];
     final result = await _adapter.api().getServers();