+
+
-
-
-
-
-
-
-
{{ task.name }}
-
Description {{ task.description }}
-
+
+
+
-
{{ task.dueDate }}
-
-
-
+
+
+
{{ task.name }}
+
{{ task.description }}
-
-
+
+
{{ task.dueDate }}
+
+
+
+
+
{{ error }}
+
+
\ No newline at end of file
+
diff --git a/Tasker.Ui/src/components/Time.ts b/Tasker.Ui/src/components/Time.ts
new file mode 100644
index 0000000..29446d6
--- /dev/null
+++ b/Tasker.Ui/src/components/Time.ts
@@ -0,0 +1,5 @@
+export interface Time{
+ time: string,
+ timeZone: string,
+ duration: string,
+}
\ No newline at end of file
diff --git a/Tasker.Ui/src/stores/counter.ts b/Tasker.Ui/src/stores/counter.ts
deleted file mode 100644
index 78a54c8..0000000
--- a/Tasker.Ui/src/stores/counter.ts
+++ /dev/null
@@ -1,12 +0,0 @@
-import {ref, computed} from 'vue'
-import { defineStore } from 'pinia'
-
-export const useCounterStore = defineStore('counter', () => {
- const count = ref(0)
- const doubleCount = computed(() => count.value * 2)
- function increment() {
- count.value++
- }
-
- return { count, doubleCount, increment }
-})
diff --git a/Tasker.Ui/src/stores/httpClient.ts b/Tasker.Ui/src/stores/httpClient.ts
new file mode 100644
index 0000000..919fc04
--- /dev/null
+++ b/Tasker.Ui/src/stores/httpClient.ts
@@ -0,0 +1,50 @@
+// src/api/axiosClient.ts
+
+import axios from 'axios'
+
+// Resolve API base URL from Vite environment variables.
+// Configure via `.env` (development) and `.env.production` (production):
+// VITE_API_BASE_URL=https://localhost:7055/
+// If not set, it will fall back to current origin.
+console.log(`BASE URL: ${import.meta.env.VITE_API_BASE_URL}`);
+const envBaseUrl = (import.meta as any)?.env?.VITE_API_BASE_URL as string | undefined
+const resolvedBaseUrl = envBaseUrl && envBaseUrl.trim().length > 0
+ ? envBaseUrl
+ : (typeof window !== 'undefined' ? window.location.origin : '')
+
+console.log(`RESOLVED BASE URL: ${resolvedBaseUrl}`);
+// Create an instance of axios
+const httpClient = axios.create({
+ baseURL: resolvedBaseUrl, // API base URL from configuration
+ headers: {
+ 'Content-Type': 'application/json', // Default headers
+ },
+ timeout: 5000, // Set a timeout (optional)
+})
+
+// Optionally, add request/response interceptors
+httpClient.interceptors.request.use(
+ (config) => {
+ // You can modify the request here, e.g., attach auth tokens
+ // config.headers.Authorization = `Bearer ${yourToken}`
+ return config
+ },
+ (error) => {
+ return Promise.reject(error)
+ }
+)
+
+httpClient.interceptors.response.use(
+ (response) => {
+ return response
+ },
+ (error) => {
+ // Handle errors globally, e.g., log out on 401
+ if (error.response?.status === 401) {
+ console.error('Unauthorized, redirect to login')
+ }
+ return Promise.reject(error)
+ }
+)
+
+export default httpClient
\ No newline at end of file
diff --git a/Tasker.Ui/src/stores/project.ts b/Tasker.Ui/src/stores/project.ts
new file mode 100644
index 0000000..960276b
--- /dev/null
+++ b/Tasker.Ui/src/stores/project.ts
@@ -0,0 +1,17 @@
+
+export interface Project {
+ id: string;
+ name: string; // [MaxLength(255)]
+ description?: string | null; // [MaxLength(2048)]
+ dueDate?: string | null;
+ completedOn?: string | null;
+}
+
+export interface CreateProjectRequest {
+ id: string;
+}
+
+export interface CompleteProjectRequest {
+ id: string;
+ completedOn: Date;
+}
diff --git a/Tasker.Ui/src/stores/projectStore.ts b/Tasker.Ui/src/stores/projectStore.ts
new file mode 100644
index 0000000..55046c9
--- /dev/null
+++ b/Tasker.Ui/src/stores/projectStore.ts
@@ -0,0 +1,77 @@
+import { ref } from 'vue'
+import { defineStore } from 'pinia'
+import { projectsApiClient } from '@/stores/projectsApiClient'
+import type { Project } from '@/stores/project'
+
+export const useProjectStore = defineStore(
+ 'projectStore',
+ () => {
+ const projects = ref<Project[]>([])
+ const isLoading = ref(false)
+ const error = ref<string | null>(null)
+
+ isLoading.value = true
+ error.value = null
+ projectsApiClient
+ .getProjects()
+ .then(t => projects.value = t)
+ .catch(e => error.value = (e as Error).message)
+ .finally(() => isLoading.value = false)
+
+ const fetchProjects = async () => {
+ isLoading.value = true
+ error.value = null
+ try {
+ projects.value = await projectsApiClient.getProjects()
+ } catch (err) {
+ error.value = (err as Error).message
+ } finally {
+ isLoading.value = false
+ }
+ }
+
+ const addProject = async (project: Project) => {
+ isLoading.value = true
+ error.value = null
+ try {
+ const newProject = await projectsApiClient.createProject(project)
+ projects.value.push(newProject)
+ } catch (err) {
+ error.value = (err as Error).message
+ } finally {
+ isLoading.value = false
+ }
+ }
+
+ const completeProject = async (project: Project) => {
+ isLoading.value = true
+ error.value = null
+ try {
+ if (project.completedOn) {
+ await projectsApiClient.uncompleteProject(project.id)
+ project.completedOn = null
+ } else {
+ project.completedOn = new Date().toISOString()
+ await projectsApiClient.completeProject(
+ {
+ id: project.id,
+ completedOn: project.completedOn
+ })
+ }
+ } catch (err) {
+ error.value = (err as Error).message
+ } finally {
+ isLoading.value = false
+ }
+ }
+
+ return {
+ projects,
+ isLoading,
+ error,
+ fetchProjects,
+ addProject,
+ completeProject
+ }
+ }
+)
\ No newline at end of file
diff --git a/Tasker.Ui/src/stores/projectsApiClient.ts b/Tasker.Ui/src/stores/projectsApiClient.ts
new file mode 100644
index 0000000..de273ba
--- /dev/null
+++ b/Tasker.Ui/src/stores/projectsApiClient.ts
@@ -0,0 +1,44 @@
+import type { Project, CreateProjectRequest, CompleteProjectRequest } from '@/stores/project'
+import httpClient from '@/stores/httpClient'
+
+export const projectsApiClient = {
+ // Fetch all Projects
+ async getProjects(): Promise<Project[]> {
+ const response = await httpClient.get('/projects')
+ return response.data
+ },
+
+ // Fetch a single Project by ID
+ async getProjectById(id: string): Promise<Project> {
+ const response = await httpClient.get(`/projects/${id}`)
+ return response.data
+ },
+
+ // Create a new Project
+ async createProject(request: CreateProjectRequest): Promise<Project> {
+ const response = await httpClient.post('/projects', request)
+ return response.data
+ },
+
+ // Update an existing Project
+ async completeProject(request: CompleteProjectRequest): Promise<Project> {
+ const response = await httpClient.put(
+ `/projects/complete`,
+ request)
+ return response.data
+ },
+
+ async uncompleteProject(id: string): Promise<Project> {
+ const response = await httpClient.put(
+ `/projects/uncomplete`,
+ {
+ id: id
+ })
+ return response.data
+ },
+
+ // Delete a Project
+ async deleteProject(id: string): Promise<void> {
+ await httpClient.delete(`/projects/${id}`)
+ },
+}
diff --git a/Tasker.Ui/src/stores/task.ts b/Tasker.Ui/src/stores/task.ts
new file mode 100644
index 0000000..41cf9a7
--- /dev/null
+++ b/Tasker.Ui/src/stores/task.ts
@@ -0,0 +1,17 @@
+
+export interface Task {
+ id: string;
+ name: string; // [MaxLength(255)]
+ description?: string | null; // [MaxLength(2048)]
+ dueDate?: string | null;
+ completedOn?: string | null;
+}
+
+export interface CreateTaskRequest {
+ id: string;
+}
+
+export interface CompleteTaskRequest {
+ id: string;
+ completedOn: Date;
+}
diff --git a/Tasker.Ui/src/stores/taskStore.ts b/Tasker.Ui/src/stores/taskStore.ts
new file mode 100644
index 0000000..8864bae
--- /dev/null
+++ b/Tasker.Ui/src/stores/taskStore.ts
@@ -0,0 +1,77 @@
+import { ref } from 'vue'
+import { defineStore } from 'pinia'
+import { tasksApiClient } from '@/stores/tasksApiClient'
+import type { Task } from '@/stores/task'
+
+export const useTaskStore = defineStore(
+ 'taskStore',
+ () => {
+ const tasks = ref<Task[]>([])
+ const isLoading = ref(false)
+ const error = ref<string | null>(null)
+
+ isLoading.value = true
+ error.value = null
+ tasksApiClient
+ .getTasks()
+ .then(t => tasks.value = t)
+ .catch(e => error.value = (e as Error).message)
+ .finally(() => isLoading.value = false)
+
+ const fetchTasks = async () => {
+ isLoading.value = true
+ error.value = null
+ try {
+ tasks.value = await tasksApiClient.getTasks()
+ } catch (err) {
+ error.value = (err as Error).message
+ } finally {
+ isLoading.value = false
+ }
+ }
+
+ const addTask = async (task: Task) => {
+ isLoading.value = true
+ error.value = null
+ try {
+ const newTask = await tasksApiClient.createTask(task)
+ tasks.value.push(newTask)
+ } catch (err) {
+ error.value = (err as Error).message
+ } finally {
+ isLoading.value = false
+ }
+ }
+
+ const completeTask = async (task: Task) => {
+ isLoading.value = true
+ error.value = null
+ try {
+ if (task.completedOn) {
+ await tasksApiClient.uncompleteTask(task.id)
+ task.completedOn = null
+ } else {
+ task.completedOn = new Date().toISOString()
+ await tasksApiClient.completeTask(
+ {
+ id: task.id,
+ completedOn: task.completedOn
+ })
+ }
+ } catch (err) {
+ error.value = (err as Error).message
+ } finally {
+ isLoading.value = false
+ }
+ }
+
+ return {
+ tasks,
+ isLoading,
+ error,
+ fetchTasks,
+ addTask,
+ completeTask
+ }
+ }
+)
\ No newline at end of file
diff --git a/Tasker.Ui/src/stores/tasksApiClient.ts b/Tasker.Ui/src/stores/tasksApiClient.ts
new file mode 100644
index 0000000..3334995
--- /dev/null
+++ b/Tasker.Ui/src/stores/tasksApiClient.ts
@@ -0,0 +1,46 @@
+import type { Task, CreateTaskRequest, CompleteTaskRequest } from '@/stores/task'
+import httpClient from '@/stores/httpClient'
+
+export const tasksApiClient = {
+ // Fetch all Tasks
+ async getTasks(): Promise<Task[]> {
+ const response = await httpClient.get('/tasks')
+ return response.data
+ },
+
+ // Fetch a single Task by ID
+ async getTaskById(id: string): Promise<Task> {
+ const response = await httpClient.get(`/tasks/${id}`)
+ return response.data
+ },
+
+ // Create a new Task
+ async createTask(request: CreateTaskRequest): Promise<Task> {
+ const response = await httpClient.post(
+ '/tasks',
+ request)
+ return response.data
+ },
+
+ // Update an existing Task
+ async completeTask(request: CompleteTaskRequest): Promise<Task> {
+ const response = await httpClient.put(
+ `/tasks/complete`,
+ request)
+ return response.data
+ },
+
+ async uncompleteTask(id: string): Promise<Task> {
+ const response = await httpClient.put(
+ `/tasks/uncomplete`,
+ {
+ id: id
+ })
+ return response.data
+ },
+
+ // Delete a Task
+ async deleteTask(id: string): Promise<void> {
+ await httpClient.delete(`/tasks/${id}`)
+ }
+}
diff --git a/Tasker.Ui/src/views/HomeView.vue b/Tasker.Ui/src/views/HomeView.vue
index ccdc29f..c2074bc 100644
--- a/Tasker.Ui/src/views/HomeView.vue
+++ b/Tasker.Ui/src/views/HomeView.vue
@@ -1,14 +1,16 @@
-
-
+
+
+
+
+
+
+
+
diff --git a/Tasker/Tasker.Web.Tests/InfrastructureFixture.cs b/Tasker/Tasker.Web.Tests/InfrastructureFixture.cs
new file mode 100644
index 0000000..1203852
--- /dev/null
+++ b/Tasker/Tasker.Web.Tests/InfrastructureFixture.cs
@@ -0,0 +1,122 @@
+using Microsoft.AspNetCore.Hosting;
+using Microsoft.AspNetCore.Mvc.Testing;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.Extensions.DependencyInjection;
+using Npgsql;
+using Respawn;
+using Tasker.Web.Tasking.Data;
+using Testcontainers.PostgreSql;
+using Task = System.Threading.Tasks.Task;
+
+namespace Tasker.Web.Tests;
+
+public class TaskerWebAppFactory(
+ PostgreSqlContainer postgres)
+ : WebApplicationFactory<Program>
+{
+ protected override void ConfigureWebHost(
+ IWebHostBuilder builder)
+ {
+ builder.ConfigureServices(services =>
+ {
+ var descriptor = services.Single(
+ d => d.ServiceType == typeof(DbContextOptions<ProjectDbContext>));
+
+ services.Remove(descriptor);
+ services.AddDbContext<ProjectDbContext>(options =>
+ {
+ options.UseNpgsql(postgres.GetConnectionString());
+ });
+ });
+ }
+}
+
+public sealed class InfrastructureFixture
+ : IAsyncLifetime
+{
+ public PostgreSqlContainer? Postgres { get; private set; }
+ public TaskerWebAppFactory AppFactory { get; private set; } = null!;
+ public HttpClient Client { get; private set; } = null!;
+ public Respawner RespawnPoint { get; private set; } = null!;
+
+ public async Task InitializeAsync()
+ {
+ await CreateDatabaseContainerAsync();
+ await CreateDatabaseAsync();
+ await SeedDatabaseAsync();
+ await CreateDatabaseSnapshotAsync();
+
+ AppFactory = new TaskerWebAppFactory(Postgres!);
+ Client = AppFactory.CreateClient();
+ }
+
+ public async Task DisposeAsync()
+ {
+ if (Postgres is null) return;
+
+ await Postgres.StopAsync();
+ await Postgres.DisposeAsync();
+ }
+
+ private async Task CreateDatabaseContainerAsync()
+ {
+ Postgres = new PostgreSqlBuilder()
+ .WithDatabase("Tasker")
+ .WithUsername("sa")
+ .WithPassword("P@ssword123!")
+ .Build();
+
+ await Postgres.StartAsync();
+ }
+
+ private async Task CreateDatabaseAsync()
+ {
+ await using var context = new ProjectDbContext(GetDbContextOptions());
+ await context.Database.EnsureDeletedAsync();
+ await context.Database.EnsureCreatedAsync();
+ }
+
+ private async Task SeedDatabaseAsync()
+ {
+ await using var context = new ProjectDbContext(GetDbContextOptions());
+ await context.SeedTestDataAsync();
+ await context.SaveChangesAsync();
+ }
+
+ private async Task CreateDatabaseSnapshotAsync()
+ {
+ var connectionString = Postgres!.GetConnectionString();
+ await using var connection = new NpgsqlConnection(connectionString);
+ await connection.OpenAsync();
+ RespawnPoint = await Respawner.CreateAsync(
+ connection,
+ new RespawnerOptions
+ {
+ SchemasToInclude = ["public"],
+ DbAdapter = DbAdapter.Postgres
+ });
+ }
+
+ public async Task RespawnAsync()
+ {
+ await RestoreDatabaseSnapshotAsync();
+ await SeedDatabaseAsync();
+ }
+
+ private async Task RestoreDatabaseSnapshotAsync()
+ {
+ var connectionString = Postgres!.GetConnectionString();
+ await using var connection = new NpgsqlConnection(connectionString);
+ await connection.OpenAsync();
+ await RespawnPoint.ResetAsync(connection);
+ }
+
+ private DbContextOptions<ProjectDbContext> GetDbContextOptions()
+ {
+ var connectionString = Postgres!.GetConnectionString();
+
+ var builder = new DbContextOptionsBuilder<ProjectDbContext>();
+ builder.UseNpgsql(connectionString);
+ return builder.Options;
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web.Tests/Tasker.Web.Tests.csproj b/Tasker/Tasker.Web.Tests/Tasker.Web.Tests.csproj
new file mode 100644
index 0000000..0d6afd9
--- /dev/null
+++ b/Tasker/Tasker.Web.Tests/Tasker.Web.Tests.csproj
@@ -0,0 +1,36 @@
+
+
+
+ net9.0
+ enable
+ enable
+ false
+
+
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
+
+
+
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Tasker/Tasker.Web.Tests/TaskerWebTestBase.cs b/Tasker/Tasker.Web.Tests/TaskerWebTestBase.cs
new file mode 100644
index 0000000..8549f21
--- /dev/null
+++ b/Tasker/Tasker.Web.Tests/TaskerWebTestBase.cs
@@ -0,0 +1,21 @@
+namespace Tasker.Web.Tests;
+
+public class TaskerWebTestBase(
+ InfrastructureFixture fixture)
+ : IClassFixture<InfrastructureFixture>, IAsyncLifetime
+{
+ protected InfrastructureFixture Fixture { get; } = fixture;
+
+ public async Task InitializeAsync()
+ {
+ Console.WriteLine("Before each test ?");
+
+ await Fixture.RespawnAsync();
+ }
+
+ public Task DisposeAsync()
+ {
+ Console.WriteLine("After each test ?");
+ return Task.CompletedTask;
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web.Tests/TaskingDbContextExtensions.cs b/Tasker/Tasker.Web.Tests/TaskingDbContextExtensions.cs
new file mode 100644
index 0000000..763882a
--- /dev/null
+++ b/Tasker/Tasker.Web.Tests/TaskingDbContextExtensions.cs
@@ -0,0 +1,44 @@
+using Tasker.Web.Tasking.Data;
+using Task = System.Threading.Tasks.Task;
+
+namespace Tasker.Web.Tests;
+
+public static class TestData
+{
+ public static readonly Tasking.Data.Task[] Tasks =
+ [
+ new()
+ {
+ Id = Guid.CreateVersion7(),
+ Name = "Test Task A - Completed",
+ Description = "Test Task Description A - Completed",
+ DueDate = DateTimeOffset.UtcNow,
+ CompletedOn = DateTimeOffset.UtcNow.AddHours(-1),
+ },
+ new()
+ {
+ Id = Guid.CreateVersion7(),
+ Name = "Test Task B",
+ Description = "Test Task Description B",
+ DueDate = DateTimeOffset.UtcNow,
+ },
+ new()
+ {
+ Id = Guid.CreateVersion7(),
+ Name = "Test Task C",
+ Description = "Test Task Description C",
+ DueDate = DateTimeOffset.UtcNow,
+ }
+ ];
+}
+
+public static class TaskingDbContextExtensions
+{
+ public static async Task SeedTestDataAsync(
+ this ProjectDbContext? context)
+ {
+ ArgumentNullException.ThrowIfNull(context);
+
+ await context.Tasks.AddRangeAsync(TestData.Tasks);
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web.Tests/WhenGettingMyTasks.cs b/Tasker/Tasker.Web.Tests/WhenGettingMyTasks.cs
new file mode 100644
index 0000000..d0c7b41
--- /dev/null
+++ b/Tasker/Tasker.Web.Tests/WhenGettingMyTasks.cs
@@ -0,0 +1,63 @@
+using System.Net.Http.Json;
+using FluentAssertions;
+using Tasker.Web.Tasking.Endpoints.Models;
+using Task = System.Threading.Tasks.Task;
+
+namespace Tasker.Web.Tests;
+
+public class WhenGettingMyTasks(
+ InfrastructureFixture fixture)
+ : TaskerWebTestBase(fixture)
+{
+ private const int StartingNumberOfTasks = 3;
+
+ [Fact]
+ public async Task EnsureWeHaveTasks()
+ {
+ var tasks = await Fixture.Client.GetFromJsonAsync<List<TaskDto>>("tasks");
+
+ tasks.Should().NotBeNull();
+ tasks.Should().HaveCount(StartingNumberOfTasks);
+ }
+
+ [Fact]
+ public async Task EnsureWeCanCreateNewTask()
+ {
+ var newTask = new TaskDto
+ (
+ Id: Guid.NewGuid(),
+ CreatedOn: DateTimeOffset.UtcNow,
+ Name: "New Task",
+ Description: "New Task Description",
+ DueDate: DateTimeOffset.UtcNow
+ );
+
+ var response = await Fixture.Client.PostAsJsonAsync(
+ "tasks",
+ newTask);
+
+ var tasks = await Fixture.Client.GetFromJsonAsync<List<TaskDto>>("tasks");
+
+ tasks.Should().Contain(c => c.Id == newTask.Id);
+ }
+
+ [Fact]
+ public async Task EnsureWeCanCreateNewTask_WithoutDueDate()
+ {
+ var newTask = new TaskDto
+ (
+ Id: Guid.NewGuid(),
+ CreatedOn: DateTimeOffset.UtcNow,
+ Name: "New Task",
+ Description: "New Task Description"
+ );
+
+ var response = await Fixture.Client.PostAsJsonAsync(
+ "tasks",
+ newTask);
+
+ var tasks = await Fixture.Client.GetFromJsonAsync<List<TaskDto>>("tasks");
+
+ tasks.Should().Contain(newTask);
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Data/ProjectingDbContext.cs b/Tasker/Tasker.Web/Data/ProjectingDbContext.cs
deleted file mode 100644
index aa79896..0000000
--- a/Tasker/Tasker.Web/Data/ProjectingDbContext.cs
+++ /dev/null
@@ -1,10 +0,0 @@
-using Microsoft.EntityFrameworkCore;
-
-namespace Tasker.Web.Data;
-
-public class ProjectingDbContext(
- DbContextOptions options)
- : DbContext(options)
-{
- public DbSet Tasks { get; set; }
-}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Endpoints/AddTask.cs b/Tasker/Tasker.Web/Endpoints/AddTask.cs
deleted file mode 100644
index 1c0954a..0000000
--- a/Tasker/Tasker.Web/Endpoints/AddTask.cs
+++ /dev/null
@@ -1,46 +0,0 @@
-using Tasker.Web.Data;
-using Task = System.Threading.Tasks.Task;
-
-namespace Tasker.Web.Endpoints;
-
-[PublicAPI]
-public record AddTaskRequest(
- Guid Id,
- string Name,
- string? Description,
- DateTimeOffset? DueDate);
-
-[PublicAPI]
-public class AddTaskEndpoint(
- ProjectingDbContext dbContext)
- : Endpoint
-{
- public override void Configure()
- {
- Post("/tasks");
- Options(o => o.WithTags("Projecting"));
- AllowAnonymous();
- }
-
- public override async Task HandleAsync(
- AddTaskRequest req,
- CancellationToken ct)
- {
- var task = await dbContext.Tasks.AddAsync(
- new Data.Task
- {
- Id = req.Id,
- Name = req.Name,
- Description = req.Description,
- DueDate = req.DueDate
- },
- cancellationToken: ct);
-
- await dbContext.SaveChangesAsync(ct);
-
- await SendCreatedAtAsync(
- task.Entity.Id,
- task.Entity,
- cancellation: ct);
- }
-}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Endpoints/GetTasks.cs b/Tasker/Tasker.Web/Endpoints/GetTasks.cs
deleted file mode 100644
index 1c6357e..0000000
--- a/Tasker/Tasker.Web/Endpoints/GetTasks.cs
+++ /dev/null
@@ -1,32 +0,0 @@
-using Microsoft.EntityFrameworkCore;
-using Tasker.Web.Data;
-using Task = System.Threading.Tasks.Task;
-
-namespace Tasker.Web.Endpoints;
-
-[PublicAPI]
-public class GetTasksEndpoint(
- ProjectingDbContext dbContext)
- : EndpointWithoutRequest
-{
- public override void Configure()
- {
- Get("/tasks");
- Options(o => o.WithTags("Projecting"));
- AllowAnonymous();
-
- }
-
- public override async Task HandleAsync(
- CancellationToken ct)
- {
- var tasks = await dbContext
- .Tasks
- .Where(t => t.CompletedOn == null)
- .ToListAsync(cancellationToken: ct);
-
- await SendOkAsync(
- tasks,
- cancellation: ct);
- }
-}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Migrations/ProjectingDbContextModelSnapshot.cs b/Tasker/Tasker.Web/Migrations/ProjectingDbContextModelSnapshot.cs
deleted file mode 100644
index 71d0f4c..0000000
--- a/Tasker/Tasker.Web/Migrations/ProjectingDbContextModelSnapshot.cs
+++ /dev/null
@@ -1,53 +0,0 @@
-//
-using System;
-using Microsoft.EntityFrameworkCore;
-using Microsoft.EntityFrameworkCore.Infrastructure;
-using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
-using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
-using Tasker.Web.Data;
-
-#nullable disable
-
-namespace Tasker.Web.Migrations
-{
- [DbContext(typeof(ProjectingDbContext))]
- partial class ProjectingDbContextModelSnapshot : ModelSnapshot
- {
- protected override void BuildModel(ModelBuilder modelBuilder)
- {
-#pragma warning disable 612, 618
- modelBuilder
- .HasAnnotation("ProductVersion", "8.0.7")
- .HasAnnotation("Relational:MaxIdentifierLength", 63);
-
- NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
-
- modelBuilder.Entity("Tasker.Web.Data.Task", b =>
- {
- b.Property("Id")
- .ValueGeneratedOnAdd()
- .HasColumnType("uuid");
-
- b.Property("CompletedOn")
- .HasColumnType("timestamp with time zone");
-
- b.Property("Description")
- .HasMaxLength(2048)
- .HasColumnType("character varying(2048)");
-
- b.Property("DueDate")
- .HasColumnType("timestamp with time zone");
-
- b.Property("Name")
- .IsRequired()
- .HasMaxLength(255)
- .HasColumnType("character varying(255)");
-
- b.HasKey("Id");
-
- b.ToTable("Tasks");
- });
-#pragma warning restore 612, 618
- }
- }
-}
diff --git a/Tasker/Tasker.Web/Program.cs b/Tasker/Tasker.Web/Program.cs
index b8e2fd7..ce763d5 100644
--- a/Tasker/Tasker.Web/Program.cs
+++ b/Tasker/Tasker.Web/Program.cs
@@ -1,5 +1,5 @@
using Microsoft.EntityFrameworkCore;
-using Tasker.Web.Data;
+using Tasker.Web.Tasking.Data;
var builder = WebApplication.CreateBuilder(args);
@@ -8,14 +8,14 @@ var builder = WebApplication.CreateBuilder(args);
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddSwaggerGen();
builder.Services.AddFastEndpoints();
-builder.Services.AddDbContext(o =>
- o.UseNpgsql(builder.Configuration.GetConnectionString("ProjectingDbContext")));
+builder.Services.AddDbContext<ProjectDbContext>(o =>
+ o.UseNpgsql(builder.Configuration.GetConnectionString("TaskingDbContext")));
builder.Services.AddCors(options =>
{
options.AddPolicy("AllowSpecificOrigin",
policy =>
{
- policy.WithOrigins("http://localhost:5173")
+ policy.WithOrigins("http://localhost:5173", "http://localhost:5174")
.AllowAnyHeader()
.AllowAnyMethod();
});
@@ -40,4 +40,9 @@ if (app.Environment.IsDevelopment())
app.UseFastEndpoints();
-app.Run();
\ No newline at end of file
+app.Run();
+
+public partial class Program
+{
+
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Tasker.Web.csproj b/Tasker/Tasker.Web/Tasker.Web.csproj
index af4451d..8367582 100644
--- a/Tasker/Tasker.Web/Tasker.Web.csproj
+++ b/Tasker/Tasker.Web/Tasker.Web.csproj
@@ -4,15 +4,23 @@
net9.0
enable
enable
+ latest
-
-
+
+
-
-
+
+
+ all
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
+
diff --git a/Tasker/Tasker.Web/Migrations/20240811051653_Initial.Designer.cs b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240811051653_Initial.Designer.cs
similarity index 93%
rename from Tasker/Tasker.Web/Migrations/20240811051653_Initial.Designer.cs
rename to Tasker/Tasker.Web/Tasking/Data/Migrations/20240811051653_Initial.Designer.cs
index d758417..87e4e04 100644
--- a/Tasker/Tasker.Web/Migrations/20240811051653_Initial.Designer.cs
+++ b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240811051653_Initial.Designer.cs
@@ -5,13 +5,13 @@ using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
-using Tasker.Web.Data;
+using Tasker.Web.Tasking.Data;
#nullable disable
-namespace Tasker.Web.Migrations
+namespace Tasker.Web.Tasking.Data.Migrations
{
- [DbContext(typeof(ProjectingDbContext))]
+ [DbContext(typeof(ProjectDbContext))]
[Migration("20240811051653_Initial")]
partial class Initial
{
diff --git a/Tasker/Tasker.Web/Migrations/20240811051653_Initial.cs b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240811051653_Initial.cs
similarity index 96%
rename from Tasker/Tasker.Web/Migrations/20240811051653_Initial.cs
rename to Tasker/Tasker.Web/Tasking/Data/Migrations/20240811051653_Initial.cs
index 02c0dbb..b925b20 100644
--- a/Tasker/Tasker.Web/Migrations/20240811051653_Initial.cs
+++ b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240811051653_Initial.cs
@@ -3,7 +3,7 @@ using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
-namespace Tasker.Web.Migrations
+namespace Tasker.Web.Tasking.Data.Migrations
{
///
public partial class Initial : Migration
diff --git a/Tasker/Tasker.Web/Migrations/20240812185703_AddCompletedOn.Designer.cs b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240812185703_AddCompletedOn.Designer.cs
similarity index 93%
rename from Tasker/Tasker.Web/Migrations/20240812185703_AddCompletedOn.Designer.cs
rename to Tasker/Tasker.Web/Tasking/Data/Migrations/20240812185703_AddCompletedOn.Designer.cs
index cd200c0..1036260 100644
--- a/Tasker/Tasker.Web/Migrations/20240812185703_AddCompletedOn.Designer.cs
+++ b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240812185703_AddCompletedOn.Designer.cs
@@ -5,13 +5,13 @@ using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
-using Tasker.Web.Data;
+using Tasker.Web.Tasking.Data;
#nullable disable
-namespace Tasker.Web.Migrations
+namespace Tasker.Web.Tasking.Data.Migrations
{
- [DbContext(typeof(ProjectingDbContext))]
+ [DbContext(typeof(ProjectDbContext))]
[Migration("20240812185703_AddCompletedOn")]
partial class AddCompletedOn
{
diff --git a/Tasker/Tasker.Web/Migrations/20240812185703_AddCompletedOn.cs b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240812185703_AddCompletedOn.cs
similarity index 94%
rename from Tasker/Tasker.Web/Migrations/20240812185703_AddCompletedOn.cs
rename to Tasker/Tasker.Web/Tasking/Data/Migrations/20240812185703_AddCompletedOn.cs
index eea3a3d..4eaa46d 100644
--- a/Tasker/Tasker.Web/Migrations/20240812185703_AddCompletedOn.cs
+++ b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240812185703_AddCompletedOn.cs
@@ -3,7 +3,7 @@ using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
-namespace Tasker.Web.Migrations
+namespace Tasker.Web.Tasking.Data.Migrations
{
///
public partial class AddCompletedOn : Migration
diff --git a/Tasker/Tasker.Web/Tasking/Data/Migrations/20240909205436_AddCreatedOn.Designer.cs b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240909205436_AddCreatedOn.Designer.cs
new file mode 100644
index 0000000..2d4ea22
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240909205436_AddCreatedOn.Designer.cs
@@ -0,0 +1,143 @@
+//
+using System;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.EntityFrameworkCore.Migrations;
+using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
+using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
+using Tasker.Web.Tasking.Data;
+
+#nullable disable
+
+namespace Tasker.Web.Tasking.Data.Migrations
+{
+ [DbContext(typeof(ProjectDbContext))]
+ [Migration("20240909205436_AddCreatedOn")]
+ partial class AddCreatedOn
+ {
+ ///
+ protected override void BuildTargetModel(ModelBuilder modelBuilder)
+ {
+#pragma warning disable 612, 618
+ modelBuilder
+ .HasAnnotation("ProductVersion", "8.0.7")
+ .HasAnnotation("Relational:MaxIdentifierLength", 63);
+
+ NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Project", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("uuid");
+
+ b.Property("CreatedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("Description")
+ .HasMaxLength(2048)
+ .HasColumnType("character varying(2048)");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasMaxLength(255)
+ .HasColumnType("character varying(255)");
+
+ b.HasKey("Id");
+
+ b.ToTable("Projects");
+ });
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Sprint", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("uuid");
+
+ b.Property("EndDate")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasMaxLength(255)
+ .HasColumnType("character varying(255)");
+
+ b.Property("ProjectId")
+ .HasColumnType("uuid");
+
+ b.Property("StartDate")
+ .HasColumnType("timestamp with time zone");
+
+ b.HasKey("Id");
+
+ b.HasIndex("ProjectId");
+
+ b.ToTable("Sprints");
+ });
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Task", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("uuid");
+
+ b.Property("CompletedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("CreatedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("Description")
+ .HasMaxLength(2048)
+ .HasColumnType("character varying(2048)");
+
+ b.Property("DueDate")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasMaxLength(255)
+ .HasColumnType("character varying(255)");
+
+ b.Property("ProjectId")
+ .HasColumnType("uuid");
+
+ b.Property("SprintId")
+ .HasColumnType("uuid");
+
+ b.HasKey("Id");
+
+ b.HasIndex("ProjectId");
+
+ b.HasIndex("SprintId");
+
+ b.ToTable("Tasks");
+ });
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Sprint", b =>
+ {
+ b.HasOne("Tasker.Web.Tasking.Data.Project", "Project")
+ .WithMany()
+ .HasForeignKey("ProjectId");
+
+ b.Navigation("Project");
+ });
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Task", b =>
+ {
+ b.HasOne("Tasker.Web.Tasking.Data.Project", "Project")
+ .WithMany()
+ .HasForeignKey("ProjectId");
+
+ b.HasOne("Tasker.Web.Tasking.Data.Sprint", "Sprint")
+ .WithMany()
+ .HasForeignKey("SprintId");
+
+ b.Navigation("Project");
+
+ b.Navigation("Sprint");
+ });
+#pragma warning restore 612, 618
+ }
+ }
+}
diff --git a/Tasker/Tasker.Web/Tasking/Data/Migrations/20240909205436_AddCreatedOn.cs b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240909205436_AddCreatedOn.cs
new file mode 100644
index 0000000..e5a562c
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240909205436_AddCreatedOn.cs
@@ -0,0 +1,135 @@
+using System;
+using Microsoft.EntityFrameworkCore.Migrations;
+
+#nullable disable
+
+namespace Tasker.Web.Tasking.Data.Migrations
+{
+ ///
+ public partial class AddCreatedOn : Migration
+ {
+ ///
+ protected override void Up(MigrationBuilder migrationBuilder)
+ {
+ migrationBuilder.AddColumn(
+ name: "CreatedOn",
+ table: "Tasks",
+ type: "timestamp with time zone",
+ nullable: false,
+ defaultValue: new DateTimeOffset(new DateTime(1, 1, 1, 0, 0, 0, 0, DateTimeKind.Unspecified), new TimeSpan(0, 0, 0, 0, 0)));
+
+ migrationBuilder.AddColumn(
+ name: "ProjectId",
+ table: "Tasks",
+ type: "uuid",
+ nullable: true);
+
+ migrationBuilder.AddColumn(
+ name: "SprintId",
+ table: "Tasks",
+ type: "uuid",
+ nullable: true);
+
+ migrationBuilder.CreateTable(
+ name: "Projects",
+ columns: table => new
+ {
+ Id = table.Column(type: "uuid", nullable: false),
+ CreatedOn = table.Column(type: "timestamp with time zone", nullable: false),
+ Name = table.Column(type: "character varying(255)", maxLength: 255, nullable: false),
+ Description = table.Column(type: "character varying(2048)", maxLength: 2048, nullable: true)
+ },
+ constraints: table =>
+ {
+ table.PrimaryKey("PK_Projects", x => x.Id);
+ });
+
+ migrationBuilder.CreateTable(
+ name: "Sprints",
+ columns: table => new
+ {
+ Id = table.Column(type: "uuid", nullable: false),
+ ProjectId = table.Column(type: "uuid", nullable: true),
+ Name = table.Column(type: "character varying(255)", maxLength: 255, nullable: false),
+ StartDate = table.Column(type: "timestamp with time zone", nullable: false),
+ EndDate = table.Column(type: "timestamp with time zone", nullable: false)
+ },
+ constraints: table =>
+ {
+ table.PrimaryKey("PK_Sprints", x => x.Id);
+ table.ForeignKey(
+ name: "FK_Sprints_Projects_ProjectId",
+ column: x => x.ProjectId,
+ principalTable: "Projects",
+ principalColumn: "Id");
+ });
+
+ migrationBuilder.CreateIndex(
+ name: "IX_Tasks_ProjectId",
+ table: "Tasks",
+ column: "ProjectId");
+
+ migrationBuilder.CreateIndex(
+ name: "IX_Tasks_SprintId",
+ table: "Tasks",
+ column: "SprintId");
+
+ migrationBuilder.CreateIndex(
+ name: "IX_Sprints_ProjectId",
+ table: "Sprints",
+ column: "ProjectId");
+
+ migrationBuilder.AddForeignKey(
+ name: "FK_Tasks_Projects_ProjectId",
+ table: "Tasks",
+ column: "ProjectId",
+ principalTable: "Projects",
+ principalColumn: "Id");
+
+ migrationBuilder.AddForeignKey(
+ name: "FK_Tasks_Sprints_SprintId",
+ table: "Tasks",
+ column: "SprintId",
+ principalTable: "Sprints",
+ principalColumn: "Id");
+ }
+
+ ///
+ protected override void Down(MigrationBuilder migrationBuilder)
+ {
+ migrationBuilder.DropForeignKey(
+ name: "FK_Tasks_Projects_ProjectId",
+ table: "Tasks");
+
+ migrationBuilder.DropForeignKey(
+ name: "FK_Tasks_Sprints_SprintId",
+ table: "Tasks");
+
+ migrationBuilder.DropTable(
+ name: "Sprints");
+
+ migrationBuilder.DropTable(
+ name: "Projects");
+
+ migrationBuilder.DropIndex(
+ name: "IX_Tasks_ProjectId",
+ table: "Tasks");
+
+ migrationBuilder.DropIndex(
+ name: "IX_Tasks_SprintId",
+ table: "Tasks");
+
+ migrationBuilder.DropColumn(
+ name: "CreatedOn",
+ table: "Tasks");
+
+ migrationBuilder.DropColumn(
+ name: "ProjectId",
+ table: "Tasks");
+
+ migrationBuilder.DropColumn(
+ name: "SprintId",
+ table: "Tasks");
+ }
+ }
+}
diff --git a/Tasker/Tasker.Web/Tasking/Data/Migrations/20240910155512_AddsDeletedOn.Designer.cs b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240910155512_AddsDeletedOn.Designer.cs
new file mode 100644
index 0000000..fc75ff1
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240910155512_AddsDeletedOn.Designer.cs
@@ -0,0 +1,158 @@
+//
+using System;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.EntityFrameworkCore.Migrations;
+using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
+using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
+using Tasker.Web.Tasking.Data;
+
+#nullable disable
+
+namespace Tasker.Web.Tasking.Data.Migrations
+{
+ [DbContext(typeof(ProjectDbContext))]
+ [Migration("20240910155512_AddsDeletedOn")]
+ partial class AddsDeletedOn
+ {
+ ///
+ protected override void BuildTargetModel(ModelBuilder modelBuilder)
+ {
+#pragma warning disable 612, 618
+ modelBuilder
+ .HasAnnotation("ProductVersion", "8.0.7")
+ .HasAnnotation("Relational:MaxIdentifierLength", 63);
+
+ NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Project", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("uuid");
+
+ b.Property("CompletedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("CreatedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("DeletedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("Description")
+ .HasMaxLength(2048)
+ .HasColumnType("character varying(2048)");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasMaxLength(255)
+ .HasColumnType("character varying(255)");
+
+ b.HasKey("Id");
+
+ b.ToTable("Projects");
+ });
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Sprint", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("uuid");
+
+ b.Property("CreatedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("DeletedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("EndDate")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasMaxLength(255)
+ .HasColumnType("character varying(255)");
+
+ b.Property("ProjectId")
+ .HasColumnType("uuid");
+
+ b.Property("StartDate")
+ .HasColumnType("timestamp with time zone");
+
+ b.HasKey("Id");
+
+ b.HasIndex("ProjectId");
+
+ b.ToTable("Sprints");
+ });
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Task", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("uuid");
+
+ b.Property("CompletedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("CreatedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("DeletedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("Description")
+ .HasMaxLength(2048)
+ .HasColumnType("character varying(2048)");
+
+ b.Property("DueDate")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasMaxLength(255)
+ .HasColumnType("character varying(255)");
+
+ b.Property("ProjectId")
+ .HasColumnType("uuid");
+
+ b.Property("SprintId")
+ .HasColumnType("uuid");
+
+ b.HasKey("Id");
+
+ b.HasIndex("ProjectId");
+
+ b.HasIndex("SprintId");
+
+ b.ToTable("Tasks");
+ });
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Sprint", b =>
+ {
+ b.HasOne("Tasker.Web.Tasking.Data.Project", "Project")
+ .WithMany()
+ .HasForeignKey("ProjectId");
+
+ b.Navigation("Project");
+ });
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Task", b =>
+ {
+ b.HasOne("Tasker.Web.Tasking.Data.Project", "Project")
+ .WithMany()
+ .HasForeignKey("ProjectId");
+
+ b.HasOne("Tasker.Web.Tasking.Data.Sprint", "Sprint")
+ .WithMany()
+ .HasForeignKey("SprintId");
+
+ b.Navigation("Project");
+
+ b.Navigation("Sprint");
+ });
+#pragma warning restore 612, 618
+ }
+ }
+}
diff --git a/Tasker/Tasker.Web/Tasking/Data/Migrations/20240910155512_AddsDeletedOn.cs b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240910155512_AddsDeletedOn.cs
new file mode 100644
index 0000000..51e4ec4
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Data/Migrations/20240910155512_AddsDeletedOn.cs
@@ -0,0 +1,70 @@
+using System;
+using Microsoft.EntityFrameworkCore.Migrations;
+
+#nullable disable
+
+namespace Tasker.Web.Tasking.Data.Migrations
+{
+ ///
+ public partial class AddsDeletedOn : Migration
+ {
+ ///
+ protected override void Up(MigrationBuilder migrationBuilder)
+ {
+ migrationBuilder.AddColumn(
+ name: "DeletedOn",
+ table: "Tasks",
+ type: "timestamp with time zone",
+ nullable: true);
+
+ migrationBuilder.AddColumn(
+ name: "CreatedOn",
+ table: "Sprints",
+ type: "timestamp with time zone",
+ nullable: false,
+ defaultValue: new DateTimeOffset(new DateTime(1, 1, 1, 0, 0, 0, 0, DateTimeKind.Unspecified), new TimeSpan(0, 0, 0, 0, 0)));
+
+ migrationBuilder.AddColumn(
+ name: "DeletedOn",
+ table: "Sprints",
+ type: "timestamp with time zone",
+ nullable: true);
+
+ migrationBuilder.AddColumn(
+ name: "CompletedOn",
+ table: "Projects",
+ type: "timestamp with time zone",
+ nullable: true);
+
+ migrationBuilder.AddColumn(
+ name: "DeletedOn",
+ table: "Projects",
+ type: "timestamp with time zone",
+ nullable: true);
+ }
+
+ ///
+ protected override void Down(MigrationBuilder migrationBuilder)
+ {
+ migrationBuilder.DropColumn(
+ name: "DeletedOn",
+ table: "Tasks");
+
+ migrationBuilder.DropColumn(
+ name: "CreatedOn",
+ table: "Sprints");
+
+ migrationBuilder.DropColumn(
+ name: "DeletedOn",
+ table: "Sprints");
+
+ migrationBuilder.DropColumn(
+ name: "CompletedOn",
+ table: "Projects");
+
+ migrationBuilder.DropColumn(
+ name: "DeletedOn",
+ table: "Projects");
+ }
+ }
+}
diff --git a/Tasker/Tasker.Web/Tasking/Data/Migrations/TaskingDbContextModelSnapshot.cs b/Tasker/Tasker.Web/Tasking/Data/Migrations/TaskingDbContextModelSnapshot.cs
new file mode 100644
index 0000000..c1d94ab
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Data/Migrations/TaskingDbContextModelSnapshot.cs
@@ -0,0 +1,155 @@
+//
+using System;
+using Microsoft.EntityFrameworkCore;
+using Microsoft.EntityFrameworkCore.Infrastructure;
+using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
+using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
+using Tasker.Web.Tasking.Data;
+
+#nullable disable
+
+namespace Tasker.Web.Tasking.Data.Migrations
+{
+ [DbContext(typeof(ProjectDbContext))]
+ partial class TaskingDbContextModelSnapshot : ModelSnapshot
+ {
+ protected override void BuildModel(ModelBuilder modelBuilder)
+ {
+#pragma warning disable 612, 618
+ modelBuilder
+ .HasAnnotation("ProductVersion", "8.0.7")
+ .HasAnnotation("Relational:MaxIdentifierLength", 63);
+
+ NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Project", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("uuid");
+
+ b.Property("CompletedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("CreatedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("DeletedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("Description")
+ .HasMaxLength(2048)
+ .HasColumnType("character varying(2048)");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasMaxLength(255)
+ .HasColumnType("character varying(255)");
+
+ b.HasKey("Id");
+
+ b.ToTable("Projects");
+ });
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Sprint", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("uuid");
+
+ b.Property("CreatedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("DeletedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("EndDate")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasMaxLength(255)
+ .HasColumnType("character varying(255)");
+
+ b.Property("ProjectId")
+ .HasColumnType("uuid");
+
+ b.Property("StartDate")
+ .HasColumnType("timestamp with time zone");
+
+ b.HasKey("Id");
+
+ b.HasIndex("ProjectId");
+
+ b.ToTable("Sprints");
+ });
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Task", b =>
+ {
+ b.Property("Id")
+ .ValueGeneratedOnAdd()
+ .HasColumnType("uuid");
+
+ b.Property("CompletedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("CreatedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("DeletedOn")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("Description")
+ .HasMaxLength(2048)
+ .HasColumnType("character varying(2048)");
+
+ b.Property("DueDate")
+ .HasColumnType("timestamp with time zone");
+
+ b.Property("Name")
+ .IsRequired()
+ .HasMaxLength(255)
+ .HasColumnType("character varying(255)");
+
+ b.Property("ProjectId")
+ .HasColumnType("uuid");
+
+ b.Property("SprintId")
+ .HasColumnType("uuid");
+
+ b.HasKey("Id");
+
+ b.HasIndex("ProjectId");
+
+ b.HasIndex("SprintId");
+
+ b.ToTable("Tasks");
+ });
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Sprint", b =>
+ {
+ b.HasOne("Tasker.Web.Tasking.Data.Project", "Project")
+ .WithMany()
+ .HasForeignKey("ProjectId");
+
+ b.Navigation("Project");
+ });
+
+ modelBuilder.Entity("Tasker.Web.Tasking.Data.Task", b =>
+ {
+ b.HasOne("Tasker.Web.Tasking.Data.Project", "Project")
+ .WithMany()
+ .HasForeignKey("ProjectId");
+
+ b.HasOne("Tasker.Web.Tasking.Data.Sprint", "Sprint")
+ .WithMany()
+ .HasForeignKey("SprintId");
+
+ b.Navigation("Project");
+
+ b.Navigation("Sprint");
+ });
+#pragma warning restore 612, 618
+ }
+ }
+}
diff --git a/Tasker/Tasker.Web/Tasking/Data/Project.cs b/Tasker/Tasker.Web/Tasking/Data/Project.cs
new file mode 100644
index 0000000..32a11f2
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Data/Project.cs
@@ -0,0 +1,13 @@
+using System.ComponentModel.DataAnnotations;
+
+namespace Tasker.Web.Tasking.Data;
+
+public class Project
+{
+ public Guid Id { get; set; }
+ public DateTimeOffset CreatedOn { get; set; }
+ public DateTimeOffset? CompletedOn { get; set; }
+ public DateTimeOffset? DeletedOn { get; set; }
+ [MaxLength(255)] public required string Name { get; set; }
+ [MaxLength(2048)] public required string? Description { get; set; }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Tasking/Data/ProjectDbContext.cs b/Tasker/Tasker.Web/Tasking/Data/ProjectDbContext.cs
new file mode 100644
index 0000000..95899b7
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Data/ProjectDbContext.cs
@@ -0,0 +1,24 @@
+using Microsoft.EntityFrameworkCore;
+
+namespace Tasker.Web.Tasking.Data;
+
+public class ProjectDbContext(
+ DbContextOptions options)
+ : DbContext(options)
+{
+ public DbSet Projects { get; set; }
+ public DbSet Sprints { get; set; }
+ public DbSet Tasks { get; set; }
+
+ protected override void OnModelCreating(
+ ModelBuilder modelBuilder)
+ {
+ modelBuilder
+ .Entity()
+ .HasOne(t => t.Project);
+
+ modelBuilder
+ .Entity()
+ .HasIndex(t => t.ProjectId);
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Tasking/Data/Sprint.cs b/Tasker/Tasker.Web/Tasking/Data/Sprint.cs
new file mode 100644
index 0000000..a7f89bc
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Data/Sprint.cs
@@ -0,0 +1,15 @@
+using System.ComponentModel.DataAnnotations;
+
+namespace Tasker.Web.Tasking.Data;
+
+public class Sprint
+{
+ public Guid Id { get; set; }
+ public DateTimeOffset CreatedOn { get; set; }
+ public Guid? ProjectId { get; set; }
+ public Project? Project { get; set; }
+ [MaxLength(255)] public required string Name { get; set; }
+ public DateTime StartDate { get; set; }
+ public DateTime EndDate { get; set; }
+ public DateTimeOffset? DeletedOn { get; set; }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Data/Task.cs b/Tasker/Tasker.Web/Tasking/Data/Task.cs
similarity index 50%
rename from Tasker/Tasker.Web/Data/Task.cs
rename to Tasker/Tasker.Web/Tasking/Data/Task.cs
index 2be0790..9dcccd9 100644
--- a/Tasker/Tasker.Web/Data/Task.cs
+++ b/Tasker/Tasker.Web/Tasking/Data/Task.cs
@@ -1,12 +1,23 @@
using System.ComponentModel.DataAnnotations;
-namespace Tasker.Web.Data;
+namespace Tasker.Web.Tasking.Data;
public class Task
{
public Guid Id { get; set; }
+
+ public Guid? ProjectId { get; set; }
+ public Project? Project { get; set; }
+
+ public Guid? SprintId { get; set; }
+ public Sprint? Sprint { get; set; }
+
+
[MaxLength(255)] public required string Name { get; set; }
[MaxLength(2048)] public string? Description { get; set; }
+
+ public DateTimeOffset CreatedOn { get; set; }
public DateTimeOffset? DueDate { get; set; }
public DateTimeOffset? CompletedOn { get; set; }
+ public DateTimeOffset? DeletedOn { get; set; }
}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Endpoints/CompleteTask.cs b/Tasker/Tasker.Web/Tasking/Endpoints/CompleteTask.cs
similarity index 70%
rename from Tasker/Tasker.Web/Endpoints/CompleteTask.cs
rename to Tasker/Tasker.Web/Tasking/Endpoints/CompleteTask.cs
index ce63b23..e1cfbe9 100644
--- a/Tasker/Tasker.Web/Endpoints/CompleteTask.cs
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/CompleteTask.cs
@@ -1,7 +1,7 @@
-using Tasker.Web.Data;
+using Tasker.Web.Tasking.Data;
using Task = System.Threading.Tasks.Task;
-namespace Tasker.Web.Endpoints;
+namespace Tasker.Web.Tasking.Endpoints;
[PublicAPI]
public record CompleteTaskRequest(
@@ -10,13 +10,13 @@ public record CompleteTaskRequest(
[PublicAPI]
public class CompleteTaskEndpoint(
- ProjectingDbContext dbContext)
+ ProjectDbContext dbContext)
: Endpoint
{
public override void Configure()
{
- Post("/tasks/{Id}/complete");
- Options(o => o.WithTags("Projecting"));
+ Put("/tasks/complete");
+ Options(o => o.WithTags("Tasks"));
AllowAnonymous();
}
@@ -36,6 +36,12 @@ public class CompleteTaskEndpoint(
return;
}
+ if (task.CompletedOn is not null)
+ {
+ await SendResultAsync(Results.BadRequest("The task was already completed"));
+ return;
+ }
+
task.CompletedOn = req.CompleteOn;
await dbContext.SaveChangesAsync(ct);
diff --git a/Tasker/Tasker.Web/Tasking/Endpoints/CreateProject.cs b/Tasker/Tasker.Web/Tasking/Endpoints/CreateProject.cs
new file mode 100644
index 0000000..681f6b7
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/CreateProject.cs
@@ -0,0 +1,47 @@
+using Tasker.Web.Tasking.Data;
+using Task = System.Threading.Tasks.Task;
+
+namespace Tasker.Web.Tasking.Endpoints;
+
+[PublicAPI]
+public record CreateProjectRequest(
+ Guid Id,
+ string Name,
+ string? Description);
+
+[PublicAPI]
+public class CreateProjectEndpoint(
+ ProjectDbContext dbContext)
+ : Endpoint
+{
+ public override void Configure()
+ {
+ Post("/projects");
+ Options(o => o.WithTags("Projects"));
+ AllowAnonymous();
+ }
+
+ public override async Task HandleAsync(
+ CreateProjectRequest req,
+ CancellationToken ct)
+ {
+ var project = await dbContext
+ .Projects
+ .AddAsync(
+ new Project
+ {
+ Id = req.Id,
+ CreatedOn = DateTimeOffset.UtcNow,
+ Name = req.Name,
+ Description = req.Description
+ },
+ cancellationToken: ct);
+
+ await dbContext.SaveChangesAsync(ct);
+
+ await SendCreatedAtAsync(
+ project.Entity.Id,
+ project.Entity,
+ cancellation: ct);
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Tasking/Endpoints/CreateTask.cs b/Tasker/Tasker.Web/Tasking/Endpoints/CreateTask.cs
new file mode 100644
index 0000000..5df0114
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/CreateTask.cs
@@ -0,0 +1,49 @@
+using Tasker.Web.Tasking.Data;
+using Task = System.Threading.Tasks.Task;
+
+namespace Tasker.Web.Tasking.Endpoints;
+
+[PublicAPI]
+public record CreateTaskRequest(
+ Guid Id,
+ string Name,
+ string? Description,
+ DateTimeOffset? DueDate);
+
+[PublicAPI]
+public class CreateTaskEndpoint(
+ ProjectDbContext dbContext)
+ : Endpoint
+{
+ public override void Configure()
+ {
+ Post("/tasks");
+ Options(o => o.WithTags("Tasks"));
+ AllowAnonymous();
+ }
+
+ public override async Task HandleAsync(
+ CreateTaskRequest req,
+ CancellationToken ct)
+ {
+ var task = await dbContext
+ .Tasks
+ .AddAsync(
+ new Data.Task
+ {
+ Id = req.Id,
+ CreatedOn = DateTimeOffset.UtcNow,
+ Name = req.Name,
+ Description = req.Description,
+ DueDate = req.DueDate?.ToUniversalTime()
+ },
+ cancellationToken: ct);
+
+ await dbContext.SaveChangesAsync(ct);
+
+ await SendCreatedAtAsync(
+ task.Entity.Id,
+ task.Entity,
+ cancellation: ct);
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Tasking/Endpoints/DeleteProject.cs b/Tasker/Tasker.Web/Tasking/Endpoints/DeleteProject.cs
new file mode 100644
index 0000000..9f80aa6
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/DeleteProject.cs
@@ -0,0 +1,46 @@
+using Tasker.Web.Tasking.Data;
+using Task = System.Threading.Tasks.Task;
+
+namespace Tasker.Web.Tasking.Endpoints;
+
+[PublicAPI]
+public record DeleteProjectRequest(
+ Guid Id);
+
+[PublicAPI]
+public class DeleteProjectEndpoint(
+ ProjectDbContext dbContext)
+ : Endpoint
+{
+ public override void Configure()
+ {
+ Delete("/projects/{Id}");
+ Options(o => o.WithTags("Projects"));
+ AllowAnonymous();
+ }
+
+ public override async Task HandleAsync(
+ DeleteProjectRequest req,
+ CancellationToken ct)
+ {
+ var project = await dbContext
+ .Projects
+ .FindAsync(
+ [req.Id],
+ ct);
+
+ if (project is null)
+ {
+ await SendNotFoundAsync(ct);
+ return;
+ }
+
+ project.DeletedOn = DateTimeOffset.UtcNow;
+
+ await dbContext.SaveChangesAsync(ct);
+
+ await SendOkAsync(
+ project,
+ cancellation: ct);
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Tasking/Endpoints/DeleteTask.cs b/Tasker/Tasker.Web/Tasking/Endpoints/DeleteTask.cs
new file mode 100644
index 0000000..5b31c50
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/DeleteTask.cs
@@ -0,0 +1,46 @@
+using Tasker.Web.Tasking.Data;
+using Task = System.Threading.Tasks.Task;
+
+namespace Tasker.Web.Tasking.Endpoints;
+
+[PublicAPI]
+public record DeleteTaskRequest(
+ Guid Id);
+
+[PublicAPI]
+public class DeleteTaskEndpoint(
+ ProjectDbContext dbContext)
+ : Endpoint
+{
+ public override void Configure()
+ {
+ Delete("/tasks/{Id}");
+ Options(o => o.WithTags("Tasks"));
+ AllowAnonymous();
+ }
+
+ public override async Task HandleAsync(
+ DeleteTaskRequest req,
+ CancellationToken ct)
+ {
+ var task = await dbContext
+ .Tasks
+ .FindAsync(
+ [req.Id],
+ ct);
+
+ if (task is null)
+ {
+ await SendNotFoundAsync(ct);
+ return;
+ }
+
+ task.DeletedOn = DateTimeOffset.UtcNow;
+
+ await dbContext.SaveChangesAsync(ct);
+
+ await SendOkAsync(
+ task,
+ cancellation: ct);
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Tasking/Endpoints/GetProject.cs b/Tasker/Tasker.Web/Tasking/Endpoints/GetProject.cs
new file mode 100644
index 0000000..f2595b1
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/GetProject.cs
@@ -0,0 +1,44 @@
+using Tasker.Web.Tasking.Data;
+using Tasker.Web.Tasking.Endpoints.Models;
+using Task = System.Threading.Tasks.Task;
+
+namespace Tasker.Web.Tasking.Endpoints;
+
+[PublicAPI]
+public record GetProjectRequest(
+ Guid Id,
+ DateTimeOffset CreatedOn,
+ string Name,
+ string? Description);
+
+[PublicAPI]
+public class GetProjectEndpoint(
+ ProjectDbContext dbContext)
+ : Endpoint
+{
+ public override void Configure()
+ {
+ Get("/projects/{Id}");
+ Options(o => o.WithTags("Projects"));
+ AllowAnonymous();
+ }
+
+ public override async Task HandleAsync(
+ GetProjectRequest req,
+ CancellationToken ct)
+ {
+ var task = await dbContext
+ .Projects
+ .FindAsync(
+ [req.Id],
+ cancellationToken: ct);
+
+ await SendOkAsync(
+ new ProjectDto(
+ task.Id,
+ task.CreatedOn,
+ task.Name,
+ task.Description),
+ cancellation: ct);
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Tasking/Endpoints/GetProjects.cs b/Tasker/Tasker.Web/Tasking/Endpoints/GetProjects.cs
new file mode 100644
index 0000000..4847ea2
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/GetProjects.cs
@@ -0,0 +1,37 @@
+using Microsoft.EntityFrameworkCore;
+using Tasker.Web.Tasking.Data;
+using Tasker.Web.Tasking.Endpoints.Models;
+using Task = System.Threading.Tasks.Task;
+
+namespace Tasker.Web.Tasking.Endpoints;
+
+[PublicAPI]
+public class GetProjectsEndpoint(
+ ProjectDbContext dbContext)
+ : EndpointWithoutRequest
+{
+ public override void Configure()
+ {
+ Get("/projects");
+ Options(o => o.WithTags("Projects"));
+ AllowAnonymous();
+ }
+
+ public override async Task HandleAsync(
+ CancellationToken ct)
+ {
+ var projects = await dbContext
+ .Projects
+ .OrderByDescending(task => task.CreatedOn)
+ .Select(task => new ProjectDto(
+ task.Id,
+ task.CreatedOn,
+ task.Name,
+ task.Description))
+ .ToArrayAsync(cancellationToken: ct);
+
+ await SendOkAsync(
+ projects,
+ cancellation: ct);
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Endpoints/GetTask.cs b/Tasker/Tasker.Web/Tasking/Endpoints/GetTask.cs
similarity index 58%
rename from Tasker/Tasker.Web/Endpoints/GetTask.cs
rename to Tasker/Tasker.Web/Tasking/Endpoints/GetTask.cs
index 5e72ea8..8be6134 100644
--- a/Tasker/Tasker.Web/Endpoints/GetTask.cs
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/GetTask.cs
@@ -1,7 +1,7 @@
-using Tasker.Web.Data;
+using Tasker.Web.Tasking.Data;
using Task = System.Threading.Tasks.Task;
-namespace Tasker.Web.Endpoints;
+namespace Tasker.Web.Tasking.Endpoints;
[PublicAPI]
public record GetTaskRequest(
@@ -9,21 +9,25 @@ public record GetTaskRequest(
[PublicAPI]
public class GetTaskEndpoint(
- ProjectingDbContext dbContext)
+ ProjectDbContext dbContext)
: Endpoint
{
public override void Configure()
{
Get("/tasks/{Id}");
- Options(o => o.WithTags("Projecting"));
+ Options(o => o.WithTags("Tasks"));
+ AllowAnonymous();
}
public override async Task HandleAsync(
GetTaskRequest req,
CancellationToken ct)
{
- var task = await dbContext.FindAsync(
- req.Id);
+ var task = await dbContext
+ .Tasks
+ .FindAsync(
+ [req.Id],
+ cancellationToken: ct);
await SendOkAsync(
task,
diff --git a/Tasker/Tasker.Web/Tasking/Endpoints/GetTasks.cs b/Tasker/Tasker.Web/Tasking/Endpoints/GetTasks.cs
new file mode 100644
index 0000000..c00557b
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/GetTasks.cs
@@ -0,0 +1,39 @@
+using Microsoft.EntityFrameworkCore;
+using Tasker.Web.Tasking.Data;
+using Tasker.Web.Tasking.Endpoints.Models;
+using Task = System.Threading.Tasks.Task;
+
+namespace Tasker.Web.Tasking.Endpoints;
+
+[PublicAPI]
+public class GetTasksEndpoint(
+ ProjectDbContext dbContext)
+ : EndpointWithoutRequest
+{
+ public override void Configure()
+ {
+ Get("/tasks");
+ Options(o => o.WithTags("Tasks"));
+ AllowAnonymous();
+ }
+
+ public override async Task HandleAsync(
+ CancellationToken ct)
+ {
+ var tasks = await dbContext
+ .Tasks
+ .OrderByDescending(task => task.CompletedOn)
+ .Select(task => new TaskDto(
+ task.Id,
+ task.CreatedOn,
+ task.Name,
+ task.Description,
+ task.DueDate,
+ task.CompletedOn))
+ .ToArrayAsync(cancellationToken: ct);
+
+ await SendOkAsync(
+ tasks,
+ cancellation: ct);
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Tasking/Endpoints/Models/ProjectDto.cs b/Tasker/Tasker.Web/Tasking/Endpoints/Models/ProjectDto.cs
new file mode 100644
index 0000000..cc655f5
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/Models/ProjectDto.cs
@@ -0,0 +1,8 @@
+namespace Tasker.Web.Tasking.Endpoints.Models;
+
+[PublicAPI]
+public record struct ProjectDto(
+ Guid Id,
+ DateTimeOffset CreatedOn,
+ string Name,
+ string? Description);
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Tasking/Endpoints/Models/TaskDto.cs b/Tasker/Tasker.Web/Tasking/Endpoints/Models/TaskDto.cs
new file mode 100644
index 0000000..425458e
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/Models/TaskDto.cs
@@ -0,0 +1,10 @@
+namespace Tasker.Web.Tasking.Endpoints.Models;
+
+[PublicAPI]
+public record TaskDto(
+ Guid Id,
+ DateTimeOffset CreatedOn,
+ string Name,
+ string? Description,
+ DateTimeOffset? DueDate = null,
+ DateTimeOffset? CompletedOn = null);
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Tasking/Endpoints/RenameDescription.cs b/Tasker/Tasker.Web/Tasking/Endpoints/RenameDescription.cs
new file mode 100644
index 0000000..8665333
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/RenameDescription.cs
@@ -0,0 +1,47 @@
+using Tasker.Web.Tasking.Data;
+using Task = System.Threading.Tasks.Task;
+
+namespace Tasker.Web.Tasking.Endpoints;
+
+[PublicAPI]
+public record RenameDescriptionRequest(
+ Guid Id,
+ string? Description);
+
+[PublicAPI]
+public class RenameDescriptionEndpoint(
+ ProjectDbContext dbContext)
+ : Endpoint
+{
+ public override void Configure()
+ {
+ Put("/projects/{Id}/description");
+ Options(o => o.WithTags("Projects"));
+ AllowAnonymous();
+ }
+
+ public override async Task HandleAsync(
+ RenameDescriptionRequest req,
+ CancellationToken ct)
+ {
+ var project = await dbContext
+ .Projects
+ .FindAsync(
+ [req.Id],
+ cancellationToken: ct);
+
+ if (project is null)
+ {
+ await SendNotFoundAsync(ct);
+ return;
+ }
+
+ project.Description = req.Description;
+
+ await dbContext.SaveChangesAsync(ct);
+
+ await SendOkAsync(
+ project,
+ cancellation: ct);
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Tasking/Endpoints/RenameProject.cs b/Tasker/Tasker.Web/Tasking/Endpoints/RenameProject.cs
new file mode 100644
index 0000000..5b6fdc0
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/RenameProject.cs
@@ -0,0 +1,47 @@
+using Tasker.Web.Tasking.Data;
+using Task = System.Threading.Tasks.Task;
+
+namespace Tasker.Web.Tasking.Endpoints;
+
+[PublicAPI]
+public record RenameProjectRequest(
+ Guid Id,
+ string Name);
+
+[PublicAPI]
+public class RenameProjectEndpoint(
+ ProjectDbContext dbContext)
+ : Endpoint
+{
+ public override void Configure()
+ {
+ Put("/projects/{Id}/name");
+ Options(o => o.WithTags("Projects"));
+ AllowAnonymous();
+ }
+
+ public override async Task HandleAsync(
+ RenameProjectRequest req,
+ CancellationToken ct)
+ {
+ var project = await dbContext
+ .Projects
+ .FindAsync(
+ [req.Id],
+ cancellationToken: ct);
+
+ if (project is null)
+ {
+ await SendNotFoundAsync(ct);
+ return;
+ }
+
+ project.Name = req.Name;
+
+ await dbContext.SaveChangesAsync(ct);
+
+ await SendOkAsync(
+ project,
+ cancellation: ct);
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/Tasking/Endpoints/UnCompleteTask.cs b/Tasker/Tasker.Web/Tasking/Endpoints/UnCompleteTask.cs
new file mode 100644
index 0000000..6c03bc9
--- /dev/null
+++ b/Tasker/Tasker.Web/Tasking/Endpoints/UnCompleteTask.cs
@@ -0,0 +1,52 @@
+using Tasker.Web.Tasking.Data;
+using Task = System.Threading.Tasks.Task;
+
+namespace Tasker.Web.Tasking.Endpoints;
+
+[PublicAPI]
+public record UnCompleteTaskRequest(
+ Guid Id);
+
+[PublicAPI]
+public class UnCompleteTaskEndpoint(
+ ProjectDbContext dbContext)
+ : Endpoint
+{
+ public override void Configure()
+ {
+ Put("/tasks/uncomplete");
+ Options(o => o.WithTags("Tasks"));
+ AllowAnonymous();
+ }
+
+ public override async Task HandleAsync(
+ UnCompleteTaskRequest req,
+ CancellationToken ct)
+ {
+ var task = await dbContext
+ .Tasks
+ .FindAsync(
+ [req.Id],
+ cancellationToken: ct);
+
+ if (task is null)
+ {
+ await SendNotFoundAsync(ct);
+ return;
+ }
+
+ if (task.CompletedOn is null)
+ {
+ await SendResultAsync(Results.BadRequest("The task was already un-completed"));
+ return;
+ }
+
+ task.CompletedOn = null;
+
+ await dbContext.SaveChangesAsync(ct);
+
+ await SendOkAsync(
+ task,
+ cancellation: ct);
+ }
+}
\ No newline at end of file
diff --git a/Tasker/Tasker.Web/appsettings.Development.json b/Tasker/Tasker.Web/appsettings.Development.json
index e8ec6aa..8f863cb 100644
--- a/Tasker/Tasker.Web/appsettings.Development.json
+++ b/Tasker/Tasker.Web/appsettings.Development.json
@@ -6,6 +6,6 @@
}
},
"ConnectionStrings": {
- "ProjectingDbContext": "Server=127.0.0.1;Port=5432;Database=Tasker;User Id=sa;Password=P@ssword123!;"
+ "TaskingDbContext": "Server=127.0.0.1;Port=5400;Database=Tasker;User Id=sa;Password=P@ssword123!;"
}
}
\ No newline at end of file
diff --git a/Tasker/Tasker.sln b/Tasker/Tasker.sln
index 1f893b7..b5b1740 100644
--- a/Tasker/Tasker.sln
+++ b/Tasker/Tasker.sln
@@ -1,16 +1,48 @@

Microsoft Visual Studio Solution File, Format Version 12.00
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Tasker.Web", "Tasker.Web\Tasker.Web.csproj", "{5718F5B9-8EF2-4FE6-ABA4-21BC699F4023}"
+# Visual Studio Version 17
+VisualStudioVersion = 17.0.31903.59
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Tasker.Web", "Tasker.Web\Tasker.Web.csproj", "{301B9679-0E9A-44F3-A83C-58ABF05F38E1}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Tasker.Web.Tests", "Tasker.Web.Tests\Tasker.Web.Tests.csproj", "{F2F5446E-3DF3-4F9E-AD5C-4D90E4A44B97}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
+ Debug|x64 = Debug|x64
+ Debug|x86 = Debug|x86
Release|Any CPU = Release|Any CPU
+ Release|x64 = Release|x64
+ Release|x86 = Release|x86
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
- {5718F5B9-8EF2-4FE6-ABA4-21BC699F4023}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
- {5718F5B9-8EF2-4FE6-ABA4-21BC699F4023}.Debug|Any CPU.Build.0 = Debug|Any CPU
- {5718F5B9-8EF2-4FE6-ABA4-21BC699F4023}.Release|Any CPU.ActiveCfg = Release|Any CPU
- {5718F5B9-8EF2-4FE6-ABA4-21BC699F4023}.Release|Any CPU.Build.0 = Release|Any CPU
+ {301B9679-0E9A-44F3-A83C-58ABF05F38E1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {301B9679-0E9A-44F3-A83C-58ABF05F38E1}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {301B9679-0E9A-44F3-A83C-58ABF05F38E1}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {301B9679-0E9A-44F3-A83C-58ABF05F38E1}.Debug|x64.Build.0 = Debug|Any CPU
+ {301B9679-0E9A-44F3-A83C-58ABF05F38E1}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {301B9679-0E9A-44F3-A83C-58ABF05F38E1}.Debug|x86.Build.0 = Debug|Any CPU
+ {301B9679-0E9A-44F3-A83C-58ABF05F38E1}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {301B9679-0E9A-44F3-A83C-58ABF05F38E1}.Release|Any CPU.Build.0 = Release|Any CPU
+ {301B9679-0E9A-44F3-A83C-58ABF05F38E1}.Release|x64.ActiveCfg = Release|Any CPU
+ {301B9679-0E9A-44F3-A83C-58ABF05F38E1}.Release|x64.Build.0 = Release|Any CPU
+ {301B9679-0E9A-44F3-A83C-58ABF05F38E1}.Release|x86.ActiveCfg = Release|Any CPU
+ {301B9679-0E9A-44F3-A83C-58ABF05F38E1}.Release|x86.Build.0 = Release|Any CPU
+ {F2F5446E-3DF3-4F9E-AD5C-4D90E4A44B97}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {F2F5446E-3DF3-4F9E-AD5C-4D90E4A44B97}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {F2F5446E-3DF3-4F9E-AD5C-4D90E4A44B97}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {F2F5446E-3DF3-4F9E-AD5C-4D90E4A44B97}.Debug|x64.Build.0 = Debug|Any CPU
+ {F2F5446E-3DF3-4F9E-AD5C-4D90E4A44B97}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {F2F5446E-3DF3-4F9E-AD5C-4D90E4A44B97}.Debug|x86.Build.0 = Debug|Any CPU
+ {F2F5446E-3DF3-4F9E-AD5C-4D90E4A44B97}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {F2F5446E-3DF3-4F9E-AD5C-4D90E4A44B97}.Release|Any CPU.Build.0 = Release|Any CPU
+ {F2F5446E-3DF3-4F9E-AD5C-4D90E4A44B97}.Release|x64.ActiveCfg = Release|Any CPU
+ {F2F5446E-3DF3-4F9E-AD5C-4D90E4A44B97}.Release|x64.Build.0 = Release|Any CPU
+ {F2F5446E-3DF3-4F9E-AD5C-4D90E4A44B97}.Release|x86.ActiveCfg = Release|Any CPU
+ {F2F5446E-3DF3-4F9E-AD5C-4D90E4A44B97}.Release|x86.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
EndGlobalSection
EndGlobal
diff --git a/Tasker/Tasker.sln.DotSettings.user b/Tasker/Tasker.sln.DotSettings.user
deleted file mode 100644
index bf9ca05..0000000
--- a/Tasker/Tasker.sln.DotSettings.user
+++ /dev/null
@@ -1,7 +0,0 @@
-
- ForceIncluded
- ForceIncluded
- ForceIncluded
- ForceIncluded
- ForceIncluded
- ForceIncluded
\ No newline at end of file
diff --git a/Tasker/dotnet-install.sh b/Tasker/dotnet-install.sh
new file mode 100644
index 0000000..38a160c
--- /dev/null
+++ b/Tasker/dotnet-install.sh
@@ -0,0 +1,1868 @@
+#!/usr/bin/env bash
+# Copyright (c) .NET Foundation and contributors. All rights reserved.
+# Licensed under the MIT license. See LICENSE file in the project root for full license information.
+#
+
+# Stop script on NZEC
+set -e
+# Stop script if unbound variable found (use ${var:-} if intentional)
+set -u
+# By default cmd1 | cmd2 returns exit code of cmd2 regardless of cmd1 success
+# This is causing it to fail
+set -o pipefail
+
+# Use in the the functions: eval $invocation
+invocation='say_verbose "Calling: ${yellow:-}${FUNCNAME[0]} ${green:-}$*${normal:-}"'
+
+# standard output may be used as a return value in the functions
+# we need a way to write text on the screen in the functions so that
+# it won't interfere with the return value.
+# Exposing stream 3 as a pipe to standard output of the script itself
+exec 3>&1
+
+# Setup some colors to use. These need to work in fairly limited shells, like the Ubuntu Docker container where there are only 8 colors.
+# See if stdout is a terminal
+if [ -t 1 ] && command -v tput > /dev/null; then
+ # see if it supports colors
+ ncolors=$(tput colors || echo 0)
+ if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then
+ bold="$(tput bold || echo)"
+ normal="$(tput sgr0 || echo)"
+ black="$(tput setaf 0 || echo)"
+ red="$(tput setaf 1 || echo)"
+ green="$(tput setaf 2 || echo)"
+ yellow="$(tput setaf 3 || echo)"
+ blue="$(tput setaf 4 || echo)"
+ magenta="$(tput setaf 5 || echo)"
+ cyan="$(tput setaf 6 || echo)"
+ white="$(tput setaf 7 || echo)"
+ fi
+fi
+
+say_warning() {
+ printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}" >&3
+}
+
+say_err() {
+ printf "%b\n" "${red:-}dotnet_install: Error: $1${normal:-}" >&2
+}
+
+say() {
+ # using stream 3 (defined in the beginning) to not interfere with stdout of functions
+ # which may be used as return value
+ printf "%b\n" "${cyan:-}dotnet-install:${normal:-} $1" >&3
+}
+
+say_verbose() {
+ if [ "$verbose" = true ]; then
+ say "$1"
+ fi
+}
+
+# This platform list is finite - if the SDK/Runtime has supported Linux distribution-specific assets,
+# then and only then should the Linux distribution appear in this list.
+# Adding a Linux distribution to this list does not imply distribution-specific support.
+get_legacy_os_name_from_platform() {
+ eval $invocation
+
+ platform="$1"
+ case "$platform" in
+ "centos.7")
+ echo "centos"
+ return 0
+ ;;
+ "debian.8")
+ echo "debian"
+ return 0
+ ;;
+ "debian.9")
+ echo "debian.9"
+ return 0
+ ;;
+ "fedora.23")
+ echo "fedora.23"
+ return 0
+ ;;
+ "fedora.24")
+ echo "fedora.24"
+ return 0
+ ;;
+ "fedora.27")
+ echo "fedora.27"
+ return 0
+ ;;
+ "fedora.28")
+ echo "fedora.28"
+ return 0
+ ;;
+ "opensuse.13.2")
+ echo "opensuse.13.2"
+ return 0
+ ;;
+ "opensuse.42.1")
+ echo "opensuse.42.1"
+ return 0
+ ;;
+ "opensuse.42.3")
+ echo "opensuse.42.3"
+ return 0
+ ;;
+ "rhel.7"*)
+ echo "rhel"
+ return 0
+ ;;
+ "ubuntu.14.04")
+ echo "ubuntu"
+ return 0
+ ;;
+ "ubuntu.16.04")
+ echo "ubuntu.16.04"
+ return 0
+ ;;
+ "ubuntu.16.10")
+ echo "ubuntu.16.10"
+ return 0
+ ;;
+ "ubuntu.18.04")
+ echo "ubuntu.18.04"
+ return 0
+ ;;
+ "alpine.3.4.3")
+ echo "alpine"
+ return 0
+ ;;
+ esac
+ return 1
+}
+
+get_legacy_os_name() {
+ eval $invocation
+
+ local uname=$(uname)
+ if [ "$uname" = "Darwin" ]; then
+ echo "osx"
+ return 0
+ elif [ -n "$runtime_id" ]; then
+ echo $(get_legacy_os_name_from_platform "${runtime_id%-*}" || echo "${runtime_id%-*}")
+ return 0
+ else
+ if [ -e /etc/os-release ]; then
+ . /etc/os-release
+ os=$(get_legacy_os_name_from_platform "$ID${VERSION_ID:+.${VERSION_ID}}" || echo "")
+ if [ -n "$os" ]; then
+ echo "$os"
+ return 0
+ fi
+ fi
+ fi
+
+ say_verbose "Distribution specific OS name and version could not be detected: UName = $uname"
+ return 1
+}
+
+get_linux_platform_name() {
+ eval $invocation
+
+ if [ -n "$runtime_id" ]; then
+ echo "${runtime_id%-*}"
+ return 0
+ else
+ if [ -e /etc/os-release ]; then
+ . /etc/os-release
+ echo "$ID${VERSION_ID:+.${VERSION_ID}}"
+ return 0
+ elif [ -e /etc/redhat-release ]; then
+            local redhatRelease=$(</etc/redhat-release)
+            if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]]; then
+                echo "rhel.6"
+                return 0
+            fi
+        fi
+    fi
+
+    say_verbose "Linux specific platform name and version could not be detected: UName = $uname"
+    return 1
+}
+
+is_musl_based_distro() {
+    (ldd --version 2>&1 || true) | grep -q musl
+}
+
+get_current_os_name() {
+ eval $invocation
+
+ local uname=$(uname)
+ if [ "$uname" = "Darwin" ]; then
+ echo "osx"
+ return 0
+ elif [ "$uname" = "FreeBSD" ]; then
+ echo "freebsd"
+ return 0
+ elif [ "$uname" = "Linux" ]; then
+ local linux_platform_name=""
+ linux_platform_name="$(get_linux_platform_name)" || true
+
+ if [ "$linux_platform_name" = "rhel.6" ]; then
+ echo $linux_platform_name
+ return 0
+ elif is_musl_based_distro; then
+ echo "linux-musl"
+ return 0
+ elif [ "$linux_platform_name" = "linux-musl" ]; then
+ echo "linux-musl"
+ return 0
+ else
+ echo "linux"
+ return 0
+ fi
+ fi
+
+ say_err "OS name could not be detected: UName = $uname"
+ return 1
+}
+
+machine_has() {
+ eval $invocation
+
+ command -v "$1" > /dev/null 2>&1
+ return $?
+}
+
+check_min_reqs() {
+ local hasMinimum=false
+ if machine_has "curl"; then
+ hasMinimum=true
+ elif machine_has "wget"; then
+ hasMinimum=true
+ fi
+
+ if [ "$hasMinimum" = "false" ]; then
+ say_err "curl (recommended) or wget are required to download dotnet. Install missing prerequisite to proceed."
+ return 1
+ fi
+ return 0
+}
+
+# args:
+# input - $1
+to_lowercase() {
+ #eval $invocation
+
+ echo "$1" | tr '[:upper:]' '[:lower:]'
+ return 0
+}
+
+# args:
+# input - $1
+remove_trailing_slash() {
+ #eval $invocation
+
+ local input="${1:-}"
+ echo "${input%/}"
+ return 0
+}
+
+# args:
+# input - $1
+remove_beginning_slash() {
+ #eval $invocation
+
+ local input="${1:-}"
+ echo "${input#/}"
+ return 0
+}
+
+# args:
+# root_path - $1
+# child_path - $2 - this parameter can be empty
+combine_paths() {
+ eval $invocation
+
+ # TODO: Consider making it work with any number of paths. For now:
+ if [ ! -z "${3:-}" ]; then
+ say_err "combine_paths: Function takes two parameters."
+ return 1
+ fi
+
+ local root_path="$(remove_trailing_slash "$1")"
+ local child_path="$(remove_beginning_slash "${2:-}")"
+ say_verbose "combine_paths: root_path=$root_path"
+ say_verbose "combine_paths: child_path=$child_path"
+ echo "$root_path/$child_path"
+ return 0
+}
+
+get_machine_architecture() {
+ eval $invocation
+
+ if command -v uname > /dev/null; then
+ CPUName=$(uname -m)
+ case $CPUName in
+ armv1*|armv2*|armv3*|armv4*|armv5*|armv6*)
+ echo "armv6-or-below"
+ return 0
+ ;;
+ armv*l)
+ echo "arm"
+ return 0
+ ;;
+ aarch64|arm64)
+ if [ "$(getconf LONG_BIT)" -lt 64 ]; then
+ # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
+ echo "arm"
+ return 0
+ fi
+ echo "arm64"
+ return 0
+ ;;
+ s390x)
+ echo "s390x"
+ return 0
+ ;;
+ ppc64le)
+ echo "ppc64le"
+ return 0
+ ;;
+ loongarch64)
+ echo "loongarch64"
+ return 0
+ ;;
+ riscv64)
+ echo "riscv64"
+ return 0
+ ;;
+ powerpc|ppc)
+ echo "ppc"
+ return 0
+ ;;
+ esac
+ fi
+
+ # Always default to 'x64'
+ echo "x64"
+ return 0
+}
+
+# args:
+# architecture - $1
+get_normalized_architecture_from_architecture() {
+ eval $invocation
+
+ local architecture="$(to_lowercase "$1")"
+
+    if [[ $architecture == \<auto\> ]]; then
+ machine_architecture="$(get_machine_architecture)"
+ if [[ "$machine_architecture" == "armv6-or-below" ]]; then
+ say_err "Architecture \`$machine_architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues"
+ return 1
+ fi
+
+ echo $machine_architecture
+ return 0
+ fi
+
+ case "$architecture" in
+ amd64|x64)
+ echo "x64"
+ return 0
+ ;;
+ arm)
+ echo "arm"
+ return 0
+ ;;
+ arm64)
+ echo "arm64"
+ return 0
+ ;;
+ s390x)
+ echo "s390x"
+ return 0
+ ;;
+ ppc64le)
+ echo "ppc64le"
+ return 0
+ ;;
+ loongarch64)
+ echo "loongarch64"
+ return 0
+ ;;
+ esac
+
+ say_err "Architecture \`$architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues"
+ return 1
+}
+
+# args:
+# version - $1
+# channel - $2
+# architecture - $3
+get_normalized_architecture_for_specific_sdk_version() {
+ eval $invocation
+
+ local is_version_support_arm64="$(is_arm64_supported "$1")"
+ local is_channel_support_arm64="$(is_arm64_supported "$2")"
+ local architecture="$3";
+ local osname="$(get_current_os_name)"
+
+ if [ "$osname" == "osx" ] && [ "$architecture" == "arm64" ] && { [ "$is_version_support_arm64" = false ] || [ "$is_channel_support_arm64" = false ]; }; then
+ #check if rosetta is installed
+ if [ "$(/usr/bin/pgrep oahd >/dev/null 2>&1;echo $?)" -eq 0 ]; then
+ say_verbose "Changing user architecture from '$architecture' to 'x64' because .NET SDKs prior to version 6.0 do not support arm64."
+ echo "x64"
+ return 0;
+ else
+ say_err "Architecture \`$architecture\` is not supported for .NET SDK version \`$version\`. Please install Rosetta to allow emulation of the \`$architecture\` .NET SDK on this platform"
+ return 1
+ fi
+ fi
+
+ echo "$architecture"
+ return 0
+}
+
+# args:
+# version or channel - $1
+is_arm64_supported() {
+ #any channel or version that starts with the specified versions
+ case "$1" in
+ ( "1"* | "2"* | "3"* | "4"* | "5"*)
+ echo false
+ return 0
+ esac
+
+ echo true
+ return 0
+}
+
+# args:
+# user_defined_os - $1
+get_normalized_os() {
+ eval $invocation
+
+ local osname="$(to_lowercase "$1")"
+ if [ ! -z "$osname" ]; then
+ case "$osname" in
+ osx | freebsd | rhel.6 | linux-musl | linux)
+ echo "$osname"
+ return 0
+ ;;
+ macos)
+ osname='osx'
+ echo "$osname"
+ return 0
+ ;;
+ *)
+ say_err "'$user_defined_os' is not a supported value for --os option, supported values are: osx, macos, linux, linux-musl, freebsd, rhel.6. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues."
+ return 1
+ ;;
+ esac
+ else
+ osname="$(get_current_os_name)" || return 1
+ fi
+ echo "$osname"
+ return 0
+}
+
+# args:
+# quality - $1
+get_normalized_quality() {
+ eval $invocation
+
+ local quality="$(to_lowercase "$1")"
+ if [ ! -z "$quality" ]; then
+ case "$quality" in
+ daily | signed | validated | preview)
+ echo "$quality"
+ return 0
+ ;;
+ ga)
+ #ga quality is available without specifying quality, so normalizing it to empty
+ return 0
+ ;;
+ *)
+ say_err "'$quality' is not a supported value for --quality option. Supported values are: daily, signed, validated, preview, ga. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues."
+ return 1
+ ;;
+ esac
+ fi
+ return 0
+}
+
+# args:
+# channel - $1
+get_normalized_channel() {
+ eval $invocation
+
+ local channel="$(to_lowercase "$1")"
+
+ if [[ $channel == current ]]; then
+ say_warning 'Value "Current" is deprecated for -Channel option. Use "STS" instead.'
+ fi
+
+ if [[ $channel == release/* ]]; then
+ say_warning 'Using branch name with -Channel option is no longer supported with newer releases. Use -Quality option with a channel in X.Y format instead.';
+ fi
+
+ if [ ! -z "$channel" ]; then
+ case "$channel" in
+ lts)
+ echo "LTS"
+ return 0
+ ;;
+ sts)
+ echo "STS"
+ return 0
+ ;;
+ current)
+ echo "STS"
+ return 0
+ ;;
+ *)
+ echo "$channel"
+ return 0
+ ;;
+ esac
+ fi
+
+ return 0
+}
+
+# args:
+# runtime - $1
+get_normalized_product() {
+ eval $invocation
+
+ local product=""
+ local runtime="$(to_lowercase "$1")"
+ if [[ "$runtime" == "dotnet" ]]; then
+ product="dotnet-runtime"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ product="aspnetcore-runtime"
+ elif [ -z "$runtime" ]; then
+ product="dotnet-sdk"
+ fi
+ echo "$product"
+ return 0
+}
+
+# The version text returned from the feeds is a 1-line or 2-line string:
+# For the SDK and the dotnet runtime (2 lines):
+# Line 1: # commit_hash
+# Line 2: # 4-part version
+# For the aspnetcore runtime (1 line):
+# Line 1: # 4-part version
+
+# args:
+# version_text - stdin
+get_version_from_latestversion_file_content() {
+ eval $invocation
+
+ cat | tail -n 1 | sed 's/\r$//'
+ return 0
+}
+
+# args:
+# install_root - $1
+# relative_path_to_package - $2
+# specific_version - $3
+is_dotnet_package_installed() {
+ eval $invocation
+
+ local install_root="$1"
+ local relative_path_to_package="$2"
+ local specific_version="${3//[$'\t\r\n']}"
+
+ local dotnet_package_path="$(combine_paths "$(combine_paths "$install_root" "$relative_path_to_package")" "$specific_version")"
+ say_verbose "is_dotnet_package_installed: dotnet_package_path=$dotnet_package_path"
+
+ if [ -d "$dotnet_package_path" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+# args:
+# downloaded file - $1
+# remote_file_size - $2
+validate_remote_local_file_sizes()
+{
+ eval $invocation
+
+ local downloaded_file="$1"
+ local remote_file_size="$2"
+ local file_size=''
+
+ if [[ "$OSTYPE" == "linux-gnu"* ]]; then
+ file_size="$(stat -c '%s' "$downloaded_file")"
+ elif [[ "$OSTYPE" == "darwin"* ]]; then
+ # hardcode in order to avoid conflicts with GNU stat
+ file_size="$(/usr/bin/stat -f '%z' "$downloaded_file")"
+ fi
+
+ if [ -n "$file_size" ]; then
+ say "Downloaded file size is $file_size bytes."
+
+ if [ -n "$remote_file_size" ] && [ -n "$file_size" ]; then
+ if [ "$remote_file_size" -ne "$file_size" ]; then
+ say "The remote and local file sizes are not equal. The remote file size is $remote_file_size bytes and the local size is $file_size bytes. The local package may be corrupted."
+ else
+ say "The remote and local file sizes are equal."
+ fi
+ fi
+
+ else
+ say "Either downloaded or local package size can not be measured. One of them may be corrupted."
+ fi
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+get_version_from_latestversion_file() {
+ eval $invocation
+
+ local azure_feed="$1"
+ local channel="$2"
+ local normalized_architecture="$3"
+
+ local version_file_url=null
+ if [[ "$runtime" == "dotnet" ]]; then
+ version_file_url="$azure_feed/Runtime/$channel/latest.version"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ version_file_url="$azure_feed/aspnetcore/Runtime/$channel/latest.version"
+ elif [ -z "$runtime" ]; then
+ version_file_url="$azure_feed/Sdk/$channel/latest.version"
+ else
+ say_err "Invalid value for \$runtime"
+ return 1
+ fi
+ say_verbose "get_version_from_latestversion_file: latest url: $version_file_url"
+
+ download "$version_file_url" || return $?
+ return 0
+}
+
+# args:
+# json_file - $1
+parse_globaljson_file_for_version() {
+ eval $invocation
+
+ local json_file="$1"
+ if [ ! -f "$json_file" ]; then
+ say_err "Unable to find \`$json_file\`"
+ return 1
+ fi
+
+ sdk_section=$(cat $json_file | tr -d "\r" | awk '/"sdk"/,/}/')
+ if [ -z "$sdk_section" ]; then
+ say_err "Unable to parse the SDK node in \`$json_file\`"
+ return 1
+ fi
+
+ sdk_list=$(echo $sdk_section | awk -F"[{}]" '{print $2}')
+ sdk_list=${sdk_list//[\" ]/}
+ sdk_list=${sdk_list//,/$'\n'}
+
+ local version_info=""
+ while read -r line; do
+ IFS=:
+ while read -r key value; do
+ if [[ "$key" == "version" ]]; then
+ version_info=$value
+ fi
+ done <<< "$line"
+ done <<< "$sdk_list"
+ if [ -z "$version_info" ]; then
+ say_err "Unable to find the SDK:version node in \`$json_file\`"
+ return 1
+ fi
+
+ unset IFS;
+ echo "$version_info"
+ return 0
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# version - $4
+# json_file - $5
+get_specific_version_from_version() {
+ eval $invocation
+
+ local azure_feed="$1"
+ local channel="$2"
+ local normalized_architecture="$3"
+ local version="$(to_lowercase "$4")"
+ local json_file="$5"
+
+ if [ -z "$json_file" ]; then
+ if [[ "$version" == "latest" ]]; then
+ local version_info
+ version_info="$(get_version_from_latestversion_file "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1
+ say_verbose "get_specific_version_from_version: version_info=$version_info"
+ echo "$version_info" | get_version_from_latestversion_file_content
+ return 0
+ else
+ echo "$version"
+ return 0
+ fi
+ else
+ local version_info
+ version_info="$(parse_globaljson_file_for_version "$json_file")" || return 1
+ echo "$version_info"
+ return 0
+ fi
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# specific_version - $4
+# normalized_os - $5
+construct_download_link() {
+ eval $invocation
+
+ local azure_feed="$1"
+ local channel="$2"
+ local normalized_architecture="$3"
+ local specific_version="${4//[$'\t\r\n']}"
+ local specific_product_version="$(get_specific_product_version "$1" "$4")"
+ local osname="$5"
+
+ local download_link=null
+ if [[ "$runtime" == "dotnet" ]]; then
+ download_link="$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ download_link="$azure_feed/aspnetcore/Runtime/$specific_version/aspnetcore-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+ elif [ -z "$runtime" ]; then
+ download_link="$azure_feed/Sdk/$specific_version/dotnet-sdk-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+ else
+ return 1
+ fi
+
+ echo "$download_link"
+ return 0
+}
+
+# args:
+# azure_feed - $1
+# specific_version - $2
+# download link - $3 (optional)
+get_specific_product_version() {
+ # If we find a 'productVersion.txt' at the root of any folder, we'll use its contents
+ # to resolve the version of what's in the folder, superseding the specified version.
+ # if 'productVersion.txt' is missing but download link is already available, product version will be taken from download link
+ eval $invocation
+
+ local azure_feed="$1"
+ local specific_version="${2//[$'\t\r\n']}"
+ local package_download_link=""
+ if [ $# -gt 2 ]; then
+ local package_download_link="$3"
+ fi
+ local specific_product_version=null
+
+ # Try to get the version number, using the productVersion.txt file located next to the installer file.
+ local download_links=($(get_specific_product_version_url "$azure_feed" "$specific_version" true "$package_download_link")
+ $(get_specific_product_version_url "$azure_feed" "$specific_version" false "$package_download_link"))
+
+ for download_link in "${download_links[@]}"
+ do
+ say_verbose "Checking for the existence of $download_link"
+
+ if machine_has "curl"
+ then
+ if ! specific_product_version=$(curl -s --fail "${download_link}${feed_credential}" 2>&1); then
+ continue
+ else
+ echo "${specific_product_version//[$'\t\r\n']}"
+ return 0
+ fi
+
+ elif machine_has "wget"
+ then
+ specific_product_version=$(wget -qO- "${download_link}${feed_credential}" 2>&1)
+ if [ $? = 0 ]; then
+ echo "${specific_product_version//[$'\t\r\n']}"
+ return 0
+ fi
+ fi
+ done
+
+ # Getting the version number with productVersion.txt has failed. Try parsing the download link for a version number.
+ say_verbose "Failed to get the version using productVersion.txt file. Download link will be parsed instead."
+ specific_product_version="$(get_product_specific_version_from_download_link "$package_download_link" "$specific_version")"
+ echo "${specific_product_version//[$'\t\r\n']}"
+ return 0
+}
+
+# args:
+# azure_feed - $1
+# specific_version - $2
+# is_flattened - $3
+# download link - $4 (optional)
+get_specific_product_version_url() {
+ eval $invocation
+
+ local azure_feed="$1"
+ local specific_version="$2"
+ local is_flattened="$3"
+ local package_download_link=""
+ if [ $# -gt 3 ]; then
+ local package_download_link="$4"
+ fi
+
+ local pvFileName="productVersion.txt"
+ if [ "$is_flattened" = true ]; then
+ if [ -z "$runtime" ]; then
+ pvFileName="sdk-productVersion.txt"
+ elif [[ "$runtime" == "dotnet" ]]; then
+ pvFileName="runtime-productVersion.txt"
+ else
+ pvFileName="$runtime-productVersion.txt"
+ fi
+ fi
+
+ local download_link=null
+
+ if [ -z "$package_download_link" ]; then
+ if [[ "$runtime" == "dotnet" ]]; then
+ download_link="$azure_feed/Runtime/$specific_version/${pvFileName}"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ download_link="$azure_feed/aspnetcore/Runtime/$specific_version/${pvFileName}"
+ elif [ -z "$runtime" ]; then
+ download_link="$azure_feed/Sdk/$specific_version/${pvFileName}"
+ else
+ return 1
+ fi
+ else
+ download_link="${package_download_link%/*}/${pvFileName}"
+ fi
+
+ say_verbose "Constructed productVersion link: $download_link"
+ echo "$download_link"
+ return 0
+}
+
+# args:
+# download link - $1
+# specific version - $2
+get_product_specific_version_from_download_link()
+{
+ eval $invocation
+
+ local download_link="$1"
+ local specific_version="$2"
+ local specific_product_version=""
+
+ if [ -z "$download_link" ]; then
+ echo "$specific_version"
+ return 0
+ fi
+
+ #get filename
+ filename="${download_link##*/}"
+
+ #product specific version follows the product name
+ #for filename 'dotnet-sdk-3.1.404-linux-x64.tar.gz': the product version is 3.1.404
+ IFS='-'
+ read -ra filename_elems <<< "$filename"
+ count=${#filename_elems[@]}
+ if [[ "$count" -gt 2 ]]; then
+ specific_product_version="${filename_elems[2]}"
+ else
+ specific_product_version=$specific_version
+ fi
+ unset IFS;
+ echo "$specific_product_version"
+ return 0
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# specific_version - $4
+construct_legacy_download_link() {
+ eval $invocation
+
+ local azure_feed="$1"
+ local channel="$2"
+ local normalized_architecture="$3"
+ local specific_version="${4//[$'\t\r\n']}"
+
+ local distro_specific_osname
+ distro_specific_osname="$(get_legacy_os_name)" || return 1
+
+ local legacy_download_link=null
+ if [[ "$runtime" == "dotnet" ]]; then
+ legacy_download_link="$azure_feed/Runtime/$specific_version/dotnet-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz"
+ elif [ -z "$runtime" ]; then
+ legacy_download_link="$azure_feed/Sdk/$specific_version/dotnet-dev-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz"
+ else
+ return 1
+ fi
+
+ echo "$legacy_download_link"
+ return 0
+}
+
+get_user_install_path() {
+ eval $invocation
+
+ if [ ! -z "${DOTNET_INSTALL_DIR:-}" ]; then
+ echo "$DOTNET_INSTALL_DIR"
+ else
+ echo "$HOME/.dotnet"
+ fi
+ return 0
+}
+
+# args:
+# install_dir - $1
+resolve_installation_path() {
+ eval $invocation
+
+ local install_dir=$1
+ if [ "$install_dir" = "" ]; then
+ local user_install_path="$(get_user_install_path)"
+ say_verbose "resolve_installation_path: user_install_path=$user_install_path"
+ echo "$user_install_path"
+ return 0
+ fi
+
+ echo "$install_dir"
+ return 0
+}
+
+# args:
+# relative_or_absolute_path - $1
+get_absolute_path() {
+ eval $invocation
+
+ local relative_or_absolute_path=$1
+ echo "$(cd "$(dirname "$1")" && pwd -P)/$(basename "$1")"
+ return 0
+}
+
+# args:
+# input_files - stdin
+# root_path - $1
+# out_path - $2
+# override - $3
+copy_files_or_dirs_from_list() {
+ eval $invocation
+
+ local root_path="$(remove_trailing_slash "$1")"
+ local out_path="$(remove_trailing_slash "$2")"
+ local override="$3"
+ local osname="$(get_current_os_name)"
+ local override_switch=$(
+ if [ "$override" = false ]; then
+ if [ "$osname" = "linux-musl" ]; then
+ printf -- "-u";
+ else
+ printf -- "-n";
+ fi
+ fi)
+
+ cat | uniq | while read -r file_path; do
+ local path="$(remove_beginning_slash "${file_path#$root_path}")"
+ local target="$out_path/$path"
+ if [ "$override" = true ] || (! ([ -d "$target" ] || [ -e "$target" ])); then
+ mkdir -p "$out_path/$(dirname "$path")"
+ if [ -d "$target" ]; then
+ rm -rf "$target"
+ fi
+ cp -R $override_switch "$root_path/$path" "$target"
+ fi
+ done
+}
+
+# args:
+# zip_uri - $1
+get_remote_file_size() {
+ local zip_uri="$1"
+
+ if machine_has "curl"; then
+ file_size=$(curl -sI "$zip_uri" | grep -i content-length | awk '{ num = $2 + 0; print num }')
+ elif machine_has "wget"; then
+ file_size=$(wget --spider --server-response -O /dev/null "$zip_uri" 2>&1 | grep -i 'Content-Length:' | awk '{ num = $2 + 0; print num }')
+ else
+ say "Neither curl nor wget is available on this system."
+ return
+ fi
+
+ if [ -n "$file_size" ]; then
+ say "Remote file $zip_uri size is $file_size bytes."
+ echo "$file_size"
+ else
+ say_verbose "Content-Length header was not extracted for $zip_uri."
+ echo ""
+ fi
+}
+
+# args:
+# zip_path - $1
+# out_path - $2
+# remote_file_size - $3
+extract_dotnet_package() {
+ eval $invocation
+
+ local zip_path="$1"
+ local out_path="$2"
+ local remote_file_size="$3"
+
+ local temp_out_path="$(mktemp -d "$temporary_file_template")"
+
+ local failed=false
+ tar -xzf "$zip_path" -C "$temp_out_path" > /dev/null || failed=true
+
+ local folders_with_version_regex='^.*/[0-9]+\.[0-9]+[^/]+/'
+ find "$temp_out_path" -type f | grep -Eo "$folders_with_version_regex" | sort | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" false
+ find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files"
+
+ validate_remote_local_file_sizes "$zip_path" "$remote_file_size"
+
+ rm -rf "$temp_out_path"
+ if [ -z ${keep_zip+x} ]; then
+ rm -f "$zip_path" && say_verbose "Temporary archive file $zip_path was removed"
+ fi
+
+ if [ "$failed" = true ]; then
+ say_err "Extraction failed"
+ return 1
+ fi
+ return 0
+}
+
+# args:
+# remote_path - $1
+# disable_feed_credential - $2
+get_http_header()
+{
+ eval $invocation
+ local remote_path="$1"
+ local disable_feed_credential="$2"
+
+ local failed=false
+ local response
+ if machine_has "curl"; then
+ get_http_header_curl $remote_path $disable_feed_credential || failed=true
+ elif machine_has "wget"; then
+ get_http_header_wget $remote_path $disable_feed_credential || failed=true
+ else
+ failed=true
+ fi
+ if [ "$failed" = true ]; then
+ say_verbose "Failed to get HTTP header: '$remote_path'."
+ return 1
+ fi
+ return 0
+}
+
+# args:
+# remote_path - $1
+# disable_feed_credential - $2
+get_http_header_curl() {
+ eval $invocation
+ local remote_path="$1"
+ local disable_feed_credential="$2"
+
+ remote_path_with_credential="$remote_path"
+ if [ "$disable_feed_credential" = false ]; then
+ remote_path_with_credential+="$feed_credential"
+ fi
+
+ curl_options="-I -sSL --retry 5 --retry-delay 2 --connect-timeout 15 "
+ curl $curl_options "$remote_path_with_credential" 2>&1 || return 1
+ return 0
+}
+
+# args:
+# remote_path - $1
+# disable_feed_credential - $2
+get_http_header_wget() {
+ eval $invocation
+ local remote_path="$1"
+ local disable_feed_credential="$2"
+ local wget_options="-q -S --spider --tries 5 "
+
+ local wget_options_extra=''
+
+ # Test for options that aren't supported on all wget implementations.
+ if [[ $(wget -h 2>&1 | grep -E 'waitretry|connect-timeout') ]]; then
+ wget_options_extra="--waitretry 2 --connect-timeout 15 "
+ else
+ say "wget extra options are unavailable for this environment"
+ fi
+
+ remote_path_with_credential="$remote_path"
+ if [ "$disable_feed_credential" = false ]; then
+ remote_path_with_credential+="$feed_credential"
+ fi
+
+ wget $wget_options $wget_options_extra "$remote_path_with_credential" 2>&1
+
+ return $?
+}
+
+# args:
+# remote_path - $1
+# [out_path] - $2 - stdout if not provided
+download() {
+ eval $invocation
+
+ local remote_path="$1"
+ local out_path="${2:-}"
+
+ if [[ "$remote_path" != "http"* ]]; then
+ cp "$remote_path" "$out_path"
+ return $?
+ fi
+
+ local failed=false
+ local attempts=0
+ while [ $attempts -lt 3 ]; do
+ attempts=$((attempts+1))
+ failed=false
+ if machine_has "curl"; then
+ downloadcurl "$remote_path" "$out_path" || failed=true
+ elif machine_has "wget"; then
+ downloadwget "$remote_path" "$out_path" || failed=true
+ else
+ say_err "Missing dependency: neither curl nor wget was found."
+ exit 1
+ fi
+
+ if [ "$failed" = false ] || [ $attempts -ge 3 ] || { [ ! -z $http_code ] && [ $http_code = "404" ]; }; then
+ break
+ fi
+
+ say "Download attempt #$attempts has failed: $http_code $download_error_msg"
+ say "Attempt #$((attempts+1)) will start in $((attempts*10)) seconds."
+ sleep $((attempts*10))
+ done
+
+ if [ "$failed" = true ]; then
+ say_verbose "Download failed: $remote_path"
+ return 1
+ fi
+ return 0
+}
+
+# Updates global variables $http_code and $download_error_msg
+# Downloads $remote_path with curl to $out_path (or stdout when out_path is
+# empty). On failure, classifies the error: curl exit codes 7/28 are reported
+# as connection timeouts; otherwise the URL is re-probed to recover an HTTP
+# status code for the diagnostic message.
+downloadcurl() {
+ eval $invocation
+ # Reset the globals so stale values from a previous attempt never leak through.
+ unset http_code
+ unset download_error_msg
+ local remote_path="$1"
+ local out_path="${2:-}"
+ # Append feed_credential as late as possible before calling curl to avoid logging feed_credential
+ # Avoid passing URI with credentials to functions: note, most of them echoing parameters of invocation in verbose output.
+ local remote_path_with_credential="${remote_path}${feed_credential}"
+ local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs "
+ local curl_exit_code=0;
+ # $curl_options is deliberately unquoted so it word-splits into flags.
+ if [ -z "$out_path" ]; then
+ curl $curl_options "$remote_path_with_credential" 2>&1
+ curl_exit_code=$?
+ else
+ curl $curl_options -o "$out_path" "$remote_path_with_credential" 2>&1
+ curl_exit_code=$?
+ fi
+
+ if [ $curl_exit_code -gt 0 ]; then
+ download_error_msg="Unable to download $remote_path."
+ # Check for curl timeout codes
+ if [[ $curl_exit_code == 7 || $curl_exit_code == 28 ]]; then
+ download_error_msg+=" Failed to reach the server: connection timeout."
+ else
+ # Probe again (credential appended inside the helper) and take the
+ # last HTTP status line, so redirects report the final code.
+ local disable_feed_credential=false
+ local response=$(get_http_header_curl $remote_path $disable_feed_credential)
+ http_code=$( echo "$response" | awk '/^HTTP/{print $2}' | tail -1 )
+ if [[ ! -z $http_code && $http_code != 2* ]]; then
+ download_error_msg+=" Returned HTTP status code: $http_code."
+ fi
+ fi
+ say_verbose "$download_error_msg"
+ return 1
+ fi
+ return 0
+}
+
+
+# Updates global variables $http_code and $download_error_msg
+# wget-based twin of downloadcurl: downloads $remote_path to $out_path (or
+# stdout when out_path is empty) and, on failure, re-probes the URL to
+# recover an HTTP status code for the diagnostic message.
+downloadwget() {
+ eval $invocation
+ # Reset the globals so stale values from a previous attempt never leak through.
+ unset http_code
+ unset download_error_msg
+ local remote_path="$1"
+ local out_path="${2:-}"
+ # Append feed_credential as late as possible before calling wget to avoid logging feed_credential
+ local remote_path_with_credential="${remote_path}${feed_credential}"
+ local wget_options="--tries 20 "
+
+ local wget_options_extra=''
+ local wget_result=''
+
+ # Test for options that aren't supported on all wget implementations.
+ if [[ $(wget -h 2>&1 | grep -E 'waitretry|connect-timeout') ]]; then
+ wget_options_extra="--waitretry 2 --connect-timeout 15 "
+ else
+ say "wget extra options are unavailable for this environment"
+ fi
+
+ # Option strings are deliberately unquoted so they word-split into flags.
+ # -q only in the stdout branch — presumably to keep wget's own log output
+ # out of the streamed payload; confirm against callers that consume stdout.
+ if [ -z "$out_path" ]; then
+ wget -q $wget_options $wget_options_extra -O - "$remote_path_with_credential" 2>&1
+ wget_result=$?
+ else
+ wget $wget_options $wget_options_extra -O "$out_path" "$remote_path_with_credential" 2>&1
+ wget_result=$?
+ fi
+
+ if [[ $wget_result != 0 ]]; then
+ local disable_feed_credential=false
+ local response=$(get_http_header_wget $remote_path $disable_feed_credential)
+ http_code=$( echo "$response" | awk '/^ HTTP/{print $2}' | tail -1 )
+ download_error_msg="Unable to download $remote_path."
+ if [[ ! -z $http_code && $http_code != 2* ]]; then
+ download_error_msg+=" Returned HTTP status code: $http_code."
+ # wget exit code 4 stands for network-issue
+ elif [[ $wget_result == 4 ]]; then
+ download_error_msg+=" Failed to reach the server: connection timeout."
+ fi
+ say_verbose "$download_error_msg"
+ return 1
+ fi
+
+ return 0
+}
+
+# Resolves the primary payload URL by probing the aka.ms redirect chain for
+# the current normalized channel/quality/product/os/architecture. On success
+# sets the global $aka_ms_download_link and returns 0; returns 1 when the
+# redirect chain is broken or no redirect location is present.
+get_download_link_from_aka_ms() {
+ eval $invocation
+
+ #quality is not supported for LTS or STS channel
+ #STS maps to current
+ if [[ ! -z "$normalized_quality" && ("$normalized_channel" == "LTS" || "$normalized_channel" == "STS") ]]; then
+ normalized_quality=""
+ say_warning "Specifying quality for STS or LTS channel is not supported, the quality will be ignored."
+ fi
+
+ say_verbose "Retrieving primary payload URL from aka.ms for channel: '$normalized_channel', quality: '$normalized_quality', product: '$normalized_product', os: '$normalized_os', architecture: '$normalized_architecture'."
+
+ #construct aka.ms link
+ aka_ms_link="https://aka.ms/dotnet"
+ if [ "$internal" = true ]; then
+ aka_ms_link="$aka_ms_link/internal"
+ fi
+ aka_ms_link="$aka_ms_link/$normalized_channel"
+ if [[ ! -z "$normalized_quality" ]]; then
+ aka_ms_link="$aka_ms_link/$normalized_quality"
+ fi
+ aka_ms_link="$aka_ms_link/$normalized_product-$normalized_os-$normalized_architecture.tar.gz"
+ say_verbose "Constructed aka.ms link: '$aka_ms_link'."
+
+ #get HTTP response
+ #do not pass credentials as a part of the $aka_ms_link and do not apply credentials in the get_http_header function
+ #otherwise the redirect link would have credentials as well
+ #it would result in applying credentials twice to the resulting link and thus breaking it, and in echoing credentials to the output as a part of redirect link
+ disable_feed_credential=true
+ response="$(get_http_header $aka_ms_link $disable_feed_credential)"
+
+ say_verbose "Received response: $response"
+ # Get results of all the redirects.
+ http_codes=$( echo "$response" | awk '$1 ~ /^HTTP/ {print $2}' )
+ # They all need to be 301, otherwise some links are broken (except for the last, which is not a redirect but 200 or 404).
+ broken_redirects=$( echo "$http_codes" | sed '$d' | grep -v '301' )
+ # The response may end without final code 2xx/4xx/5xx somehow, e.g. network restrictions on www.bing.com causes redirecting to bing.com fails with connection refused.
+ # In this case it should not exclude the last.
+ last_http_code=$( echo "$http_codes" | tail -n 1 )
+ if ! [[ $last_http_code =~ ^(2|4|5)[0-9][0-9]$ ]]; then
+ broken_redirects=$( echo "$http_codes" | grep -v '301' )
+ fi
+
+ # All HTTP codes are 301 (Moved Permanently), the redirect link exists.
+ if [[ -z "$broken_redirects" ]]; then
+ # The last Location header in the chain is the final payload URL;
+ # tr strips the carriage return from the raw header line.
+ aka_ms_download_link=$( echo "$response" | awk '$1 ~ /^Location/{print $2}' | tail -1 | tr -d '\r')
+
+ if [[ -z "$aka_ms_download_link" ]]; then
+ say_verbose "The aka.ms link '$aka_ms_link' is not valid: failed to get redirect location."
+ return 1
+ fi
+
+ say_verbose "The redirect location retrieved: '$aka_ms_download_link'."
+ return 0
+ else
+ say_verbose "The aka.ms link '$aka_ms_link' is not valid: received HTTP code: $(echo "$broken_redirects" | paste -sd "," -)."
+ return 1
+ fi
+}
+
+# Populates the global $feeds array with the download feeds to try, honoring
+# the --no-cdn switch and the --azure-feed / --uncached-feed overrides.
+get_feeds_to_use()
+{
+ if [[ "$no_cdn" == "true" ]]; then
+ # Bypass the CDN and talk to the storage endpoints directly.
+ feeds=(
+ "https://dotnetcli.blob.core.windows.net/dotnet"
+ "https://dotnetbuilds.blob.core.windows.net/public"
+ )
+ if [[ -n "$uncached_feed" ]]; then
+ feeds=("$uncached_feed")
+ fi
+ else
+ # Default: CDN-backed feeds, unless the caller supplied an override.
+ feeds=(
+ "https://dotnetcli.azureedge.net/dotnet"
+ "https://dotnetbuilds.azureedge.net/public"
+ )
+ if [[ -n "$azure_feed" ]]; then
+ feeds=("$azure_feed")
+ fi
+ fi
+}
+
+# THIS FUNCTION MAY EXIT (if the determined version is already installed).
+# Fills the parallel global arrays $download_links / $specific_versions /
+# $effective_versions / $link_types. An aka.ms link is preferred; the regular
+# per-feed resolution runs only when aka.ms produced no link.
+generate_download_links() {
+
+ download_links=()
+ specific_versions=()
+ effective_versions=()
+ link_types=()
+
+ # If generate_akams_links returns false, no fallback to old links. Just terminate.
+ # This function may also 'exit' (if the determined version is already installed).
+ generate_akams_links || return
+
+ # Check other feeds only if we haven't been able to find an aka.ms link.
+ if [[ "${#download_links[@]}" -lt 1 ]]; then
+ for feed in ${feeds[@]}
+ do
+ # generate_regular_links may also 'exit' (if the determined version is already installed).
+ generate_regular_links $feed || return
+ done
+ fi
+
+ if [[ "${#download_links[@]}" -eq 0 ]]; then
+ say_err "Failed to resolve the exact version number."
+ return 1
+ fi
+
+ say_verbose "Generated ${#download_links[@]} links."
+ for link_index in ${!download_links[@]}
+ do
+ say_verbose "Link $link_index: ${link_types[$link_index]}, ${effective_versions[$link_index]}, ${download_links[$link_index]}"
+ done
+}
+
+# THIS FUNCTION MAY EXIT (if the determined version is already installed).
+# Attempts to resolve the payload via an aka.ms short link. On success the
+# link is appended to the global link arrays and the function returns 0.
+# On failure with an explicit --quality it returns 1 (no fallback exists);
+# otherwise it returns 0 so the caller falls back to the latest.version flow.
+generate_akams_links() {
+ local valid_aka_ms_link=true;
+
+ normalized_version="$(to_lowercase "$version")"
+ if [[ "$normalized_version" != "latest" ]] && [ -n "$normalized_quality" ]; then
+ say_err "Quality and Version options are not allowed to be specified simultaneously. See https://learn.microsoft.com/dotnet/core/tools/dotnet-install-script#options for details."
+ return 1
+ fi
+
+ if [[ -n "$json_file" || "$normalized_version" != "latest" ]]; then
+ # aka.ms links are not needed when exact version is specified via command or json file
+ return
+ fi
+
+ get_download_link_from_aka_ms || valid_aka_ms_link=false
+
+ if [[ "$valid_aka_ms_link" == true ]]; then
+ say_verbose "Retrieved primary payload URL from aka.ms link: '$aka_ms_download_link'."
+ say_verbose "Downloading using legacy url will not be attempted."
+
+ download_link=$aka_ms_download_link
+
+ #get version from the path
+ # URL layout ends with .../<version>/<file>, so the second-to-last path
+ # element is the specific version.
+ IFS='/'
+ read -ra pathElems <<< "$download_link"
+ count=${#pathElems[@]}
+ specific_version="${pathElems[count-2]}"
+ unset IFS;
+ say_verbose "Version: '$specific_version'."
+
+ #Retrieve effective version
+ effective_version="$(get_specific_product_version "$azure_feed" "$specific_version" "$download_link")"
+
+ # Add link info to arrays
+ download_links+=($download_link)
+ specific_versions+=($specific_version)
+ effective_versions+=($effective_version)
+ link_types+=("aka.ms")
+
+ # Check if the SDK version is already installed.
+ if [[ "$dry_run" != true ]] && is_dotnet_package_installed "$install_root" "$asset_relative_path" "$effective_version"; then
+ say "$asset_name with version '$effective_version' is already installed."
+ exit 0
+ fi
+
+ return 0
+ fi
+
+ # if quality is specified - exit with error - there is no fallback approach
+ if [ ! -z "$normalized_quality" ]; then
+ say_err "Failed to locate the latest version in the channel '$normalized_channel' with '$normalized_quality' quality for '$normalized_product', os: '$normalized_os', architecture: '$normalized_architecture'."
+ say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support."
+ return 1
+ fi
+ say_verbose "Falling back to latest.version file approach."
+}
+
+# THIS FUNCTION MAY EXIT (if the determined version is already installed)
+# Resolves the specific version from the given feed, constructs the primary
+# (and, when possible, legacy) download links, and appends them to the global
+# link arrays. Returns silently when the feed cannot resolve a version.
+# args:
+# feed - $1
+generate_regular_links() {
+ local feed="$1"
+ local valid_legacy_download_link=true
+
+ specific_version=$(get_specific_version_from_version "$feed" "$channel" "$normalized_architecture" "$version" "$json_file") || specific_version='0'
+
+ if [[ "$specific_version" == '0' ]]; then
+ say_verbose "Failed to resolve the specific version number using feed '$feed'"
+ return
+ fi
+
+ effective_version="$(get_specific_product_version "$feed" "$specific_version")"
+ say_verbose "specific_version=$specific_version"
+
+ download_link="$(construct_download_link "$feed" "$channel" "$normalized_architecture" "$specific_version" "$normalized_os")"
+ say_verbose "Constructed primary named payload URL: $download_link"
+
+ # Add link info to arrays (quoted so each value stays a single array element).
+ download_links+=("$download_link")
+ specific_versions+=("$specific_version")
+ effective_versions+=("$effective_version")
+ link_types+=("primary")
+
+ legacy_download_link="$(construct_legacy_download_link "$feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false
+
+ if [ "$valid_legacy_download_link" = true ]; then
+ say_verbose "Constructed legacy named payload URL: $legacy_download_link"
+
+ download_links+=("$legacy_download_link")
+ specific_versions+=("$specific_version")
+ effective_versions+=("$effective_version")
+ link_types+=("legacy")
+ else
+ legacy_download_link=""
+ # Fixed typo in the log message ("Cound" -> "Could").
+ say_verbose "Could not construct a legacy_download_link; omitting..."
+ fi
+
+ # Check if the SDK version is already installed.
+ if [[ "$dry_run" != true ]] && is_dotnet_package_installed "$install_root" "$asset_relative_path" "$effective_version"; then
+ say "$asset_name with version '$effective_version' is already installed."
+ exit 0
+ fi
+}
+
+# Prints the resolved payload URLs and a copy-pasteable command line that
+# reproduces this invocation with the exact resolved version pinned.
+print_dry_run() {
+
+ say "Payload URLs:"
+
+ for link_index in "${!download_links[@]}"
+ do
+ say "URL #$link_index - ${link_types[$link_index]}: ${download_links[$link_index]}"
+ done
+
+ # All links resolve the same version, so the first entry is representative.
+ resolved_version=${specific_versions[0]}
+ repeatable_command="./$script_name --version "\""$resolved_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"" --os "\""$normalized_os"\"""
+
+ if [ ! -z "$normalized_quality" ]; then
+ repeatable_command+=" --quality "\""$normalized_quality"\"""
+ fi
+
+ if [[ "$runtime" == "dotnet" ]]; then
+ repeatable_command+=" --runtime "\""dotnet"\"""
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ repeatable_command+=" --runtime "\""aspnetcore"\"""
+ fi
+
+ repeatable_command+="$non_dynamic_parameters"
+
+ # Never echo the real credential; emit an empty placeholder instead.
+ if [ -n "$feed_credential" ]; then
+ repeatable_command+=" --feed-credential "\"""\"""
+ fi
+
+ say "Repeatable invocation: $repeatable_command"
+}
+
+# Computes all derived globals (normalized architecture/os/quality/channel/
+# product, install root, asset names) and selects the feeds to use.
+calculate_vars() {
+ eval $invocation
+
+ script_name=$(basename "$0")
+ normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")"
+ say_verbose "Normalized architecture: '$normalized_architecture'."
+ normalized_os="$(get_normalized_os "$user_defined_os")"
+ say_verbose "Normalized OS: '$normalized_os'."
+ normalized_quality="$(get_normalized_quality "$quality")"
+ say_verbose "Normalized quality: '$normalized_quality'."
+ normalized_channel="$(get_normalized_channel "$channel")"
+ say_verbose "Normalized channel: '$normalized_channel'."
+ normalized_product="$(get_normalized_product "$runtime")"
+ say_verbose "Normalized product: '$normalized_product'."
+ install_root="$(resolve_installation_path "$install_dir")"
+ say_verbose "InstallRoot: '$install_root'."
+
+ # Architecture may be adjusted again once the requested SDK version is known.
+ normalized_architecture="$(get_normalized_architecture_for_specific_sdk_version "$version" "$normalized_channel" "$normalized_architecture")"
+
+ # Map the requested runtime flavor to its on-disk layout and display name.
+ if [[ "$runtime" == "dotnet" ]]; then
+ asset_relative_path="shared/Microsoft.NETCore.App"
+ asset_name=".NET Core Runtime"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ asset_relative_path="shared/Microsoft.AspNetCore.App"
+ asset_name="ASP.NET Core Runtime"
+ elif [ -z "$runtime" ]; then
+ asset_relative_path="sdk"
+ asset_name=".NET Core SDK"
+ fi
+
+ get_feeds_to_use
+}
+
+# Downloads the payload, trying each generated link in order, extracts it into
+# $install_root, then verifies the expected version is actually present.
+# Returns 1 when every link fails or post-install verification fails.
+install_dotnet() {
+ eval $invocation
+ local download_failed=false
+ local download_completed=false
+ local remote_file_size=0
+
+ mkdir -p "$install_root"
+ # Reuse a user-supplied --zip-path when given, otherwise a fresh temp file.
+ zip_path="${zip_path:-$(mktemp "$temporary_file_template")}"
+ say_verbose "Archive path: $zip_path"
+
+ # Try each candidate link (aka.ms / primary / legacy) until one succeeds.
+ for link_index in "${!download_links[@]}"
+ do
+ download_link="${download_links[$link_index]}"
+ specific_version="${specific_versions[$link_index]}"
+ effective_version="${effective_versions[$link_index]}"
+ link_type="${link_types[$link_index]}"
+
+ say "Attempting to download using $link_type link $download_link"
+
+ # The download function will set variables $http_code and $download_error_msg in case of failure.
+ download_failed=false
+ download "$download_link" "$zip_path" 2>&1 || download_failed=true
+
+ if [ "$download_failed" = true ]; then
+ case $http_code in
+ 404)
+ say "The resource at $link_type link '$download_link' is not available."
+ ;;
+ *)
+ say "Failed to download $link_type link '$download_link': $download_error_msg"
+ ;;
+ esac
+ # Remove any partial download before trying the next link.
+ rm -f "$zip_path" 2>&1 && say_verbose "Temporary archive file $zip_path was removed"
+ else
+ download_completed=true
+ break
+ fi
+ done
+
+ if [[ "$download_completed" == false ]]; then
+ say_err "Could not find \`$asset_name\` with version = $specific_version"
+ say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
+ return 1
+ fi
+
+ # Passed to the extraction step to sanity-check the downloaded archive.
+ remote_file_size="$(get_remote_file_size "$download_link")"
+
+ say "Extracting archive from $download_link"
+ extract_dotnet_package "$zip_path" "$install_root" "$remote_file_size" || return 1
+
+ # Check if the SDK version is installed; if not, fail the installation.
+ # if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
+ if [[ $specific_version == *"rtm"* || $specific_version == *"servicing"* ]]; then
+ # release_version is the portion of the version before the first '-'.
+ IFS='-'
+ read -ra verArr <<< "$specific_version"
+ release_version="${verArr[0]}"
+ unset IFS;
+ say_verbose "Checking installation: version = $release_version"
+ if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$release_version"; then
+ say "Installed version is $effective_version"
+ return 0
+ fi
+ fi
+
+ # Check if the standard SDK version is installed.
+ say_verbose "Checking installation: version = $effective_version"
+ if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$effective_version"; then
+ say "Installed version is $effective_version"
+ return 0
+ fi
+
+ # Version verification failed. More likely something is wrong either with the downloaded content or with the verification algorithm.
+ say_err "Failed to verify the version of installed \`$asset_name\`.\nInstallation source: $download_link.\nInstallation location: $install_root.\nReport the bug at https://github.com/dotnet/install-scripts/issues."
+ say_err "\`$asset_name\` with version = $effective_version failed to install with an error."
+ return 1
+}
+
+args=("$@")
+
+# Relative paths within the install root.
+local_version_file_relative_path="/.version"
+bin_folder_relative_path=""
+# mktemp template for the downloaded archive (honors $TMPDIR when set).
+temporary_file_template="${TMPDIR:-/tmp}/dotnet.XXXXXXXXX"
+
+# Defaults for all command-line options.
+channel="LTS"
+version="Latest"
+json_file=""
+install_dir=""
+architecture=""
+dry_run=false
+no_path=false
+no_cdn=false
+azure_feed=""
+uncached_feed=""
+feed_credential=""
+verbose=false
+runtime=""
+runtime_id=""
+quality=""
+internal=false
+override_non_versioned_files=true
+# Accumulates pass-through flags echoed back in the 'repeatable invocation' line.
+non_dynamic_parameters=""
+user_defined_os=""
+
+while [ $# -ne 0 ]
+do
+ name="$1"
+ case "$name" in
+ -c|--channel|-[Cc]hannel)
+ shift
+ channel="$1"
+ ;;
+ -v|--version|-[Vv]ersion)
+ shift
+ version="$1"
+ ;;
+ -q|--quality|-[Qq]uality)
+ shift
+ quality="$1"
+ ;;
+ --internal|-[Ii]nternal)
+ internal=true
+ non_dynamic_parameters+=" $name"
+ ;;
+ -i|--install-dir|-[Ii]nstall[Dd]ir)
+ shift
+ install_dir="$1"
+ ;;
+ --arch|--architecture|-[Aa]rch|-[Aa]rchitecture)
+ shift
+ architecture="$1"
+ ;;
+ --os|-[Oo][SS])
+ shift
+ user_defined_os="$1"
+ ;;
+ --shared-runtime|-[Ss]hared[Rr]untime)
+ say_warning "The --shared-runtime flag is obsolete and may be removed in a future version of this script. The recommended usage is to specify '--runtime dotnet'."
+ if [ -z "$runtime" ]; then
+ runtime="dotnet"
+ fi
+ ;;
+ --runtime|-[Rr]untime)
+ shift
+ runtime="$1"
+ if [[ "$runtime" != "dotnet" ]] && [[ "$runtime" != "aspnetcore" ]]; then
+ say_err "Unsupported value for --runtime: '$1'. Valid values are 'dotnet' and 'aspnetcore'."
+ if [[ "$runtime" == "windowsdesktop" ]]; then
+ say_err "WindowsDesktop archives are manufactured for Windows platforms only."
+ fi
+ exit 1
+ fi
+ ;;
+ --dry-run|-[Dd]ry[Rr]un)
+ dry_run=true
+ ;;
+ --no-path|-[Nn]o[Pp]ath)
+ no_path=true
+ non_dynamic_parameters+=" $name"
+ ;;
+ --verbose|-[Vv]erbose)
+ verbose=true
+ non_dynamic_parameters+=" $name"
+ ;;
+ --no-cdn|-[Nn]o[Cc]dn)
+ no_cdn=true
+ non_dynamic_parameters+=" $name"
+ ;;
+ --azure-feed|-[Aa]zure[Ff]eed)
+ shift
+ azure_feed="$1"
+ non_dynamic_parameters+=" $name "\""$1"\"""
+ ;;
+ --uncached-feed|-[Uu]ncached[Ff]eed)
+ shift
+ uncached_feed="$1"
+ non_dynamic_parameters+=" $name "\""$1"\"""
+ ;;
+ --feed-credential|-[Ff]eed[Cc]redential)
+ shift
+ feed_credential="$1"
+ #feed_credential should start with "?", for it to be added to the end of the link.
+ #adding "?" at the beginning of the feed_credential if needed.
+ [[ -z "$(echo $feed_credential)" ]] || [[ $feed_credential == \?* ]] || feed_credential="?$feed_credential"
+ ;;
+ --runtime-id|-[Rr]untime[Ii]d)
+ shift
+ runtime_id="$1"
+ non_dynamic_parameters+=" $name "\""$1"\"""
+ say_warning "Use of --runtime-id is obsolete and should be limited to the versions below 2.1. To override architecture, use --architecture option instead. To override OS, use --os option instead."
+ ;;
+ --jsonfile|-[Jj][Ss]on[Ff]ile)
+ shift
+ json_file="$1"
+ ;;
+ --skip-non-versioned-files|-[Ss]kip[Nn]on[Vv]ersioned[Ff]iles)
+ override_non_versioned_files=false
+ non_dynamic_parameters+=" $name"
+ ;;
+ --keep-zip|-[Kk]eep[Zz]ip)
+ keep_zip=true
+ non_dynamic_parameters+=" $name"
+ ;;
+ --zip-path|-[Zz]ip[Pp]ath)
+ shift
+ zip_path="$1"
+ ;;
+ -?|--?|-h|--help|-[Hh]elp)
+ script_name="$(basename "$0")"
+ echo ".NET Tools Installer"
+ echo "Usage:"
+ echo " # Install a .NET SDK of a given Quality from a given Channel"
+ echo " $script_name [-c|--channel ] [-q|--quality ]"
+ echo " # Install a .NET SDK of a specific public version"
+ echo " $script_name [-v|--version ]"
+ echo " $script_name -h|-?|--help"
+ echo ""
+ echo "$script_name is a simple command line interface for obtaining dotnet cli."
+ echo " Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:"
+ echo " - The SDK needs to be installed without user interaction and without admin rights."
+ echo " - The SDK installation doesn't need to persist across multiple CI runs."
+ echo " To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer."
+ echo ""
+ echo "Options:"
+ echo " -c,--channel Download from the channel specified, Defaults to \`$channel\`."
+ echo " -Channel"
+ echo " Possible values:"
+ echo " - STS - the most recent Standard Term Support release"
+ echo " - LTS - the most recent Long Term Support release"
+ echo " - 2-part version in a format A.B - represents a specific release"
+ echo " examples: 2.0; 1.0"
+ echo " - 3-part version in a format A.B.Cxx - represents a specific SDK release"
+ echo " examples: 5.0.1xx, 5.0.2xx."
+ echo " Supported since 5.0 release"
+ echo " Warning: Value 'Current' is deprecated for the Channel parameter. Use 'STS' instead."
+ echo " Note: The version parameter overrides the channel parameter when any version other than 'latest' is used."
+ echo " -v,--version Use specific VERSION, Defaults to \`$version\`."
+ echo " -Version"
+ echo " Possible values:"
+ echo " - latest - the latest build on specific channel"
+ echo " - 3-part version in a format A.B.C - represents specific version of build"
+ echo " examples: 2.0.0-preview2-006120; 1.1.0"
+ echo " -q,--quality Download the latest build of specified quality in the channel."
+ echo " -Quality"
+ echo " The possible values are: daily, signed, validated, preview, GA."
+ echo " Works only in combination with channel. Not applicable for STS and LTS channels and will be ignored if those channels are used."
+ echo " For SDK use channel in A.B.Cxx format. Using quality for SDK together with channel in A.B format is not supported."
+ echo " Supported since 5.0 release."
+ echo " Note: The version parameter overrides the channel parameter when any version other than 'latest' is used, and therefore overrides the quality."
+ echo " --internal,-Internal Download internal builds. Requires providing credentials via --feed-credential parameter."
+ echo " --feed-credential Token to access Azure feed. Used as a query string to append to the Azure feed."
+ echo " -FeedCredential This parameter typically is not specified."
+ echo " -i,--install-dir Install under specified location (see Install Location below)"
+ echo " -InstallDir"
+ echo " --architecture Architecture of dotnet binaries to be installed, Defaults to \`$architecture\`."
+ echo " --arch,-Architecture,-Arch"
+ echo " Possible values: x64, arm, arm64, s390x, ppc64le and loongarch64"
+ echo " --os Specifies operating system to be used when selecting the installer."
+ echo " Overrides the OS determination approach used by the script. Supported values: osx, linux, linux-musl, freebsd, rhel.6."
+ echo " In case any other value is provided, the platform will be determined by the script based on machine configuration."
+ echo " Not supported for legacy links. Use --runtime-id to specify platform for legacy links."
+ echo " Refer to: https://aka.ms/dotnet-os-lifecycle for more information."
+ echo " --runtime Installs a shared runtime only, without the SDK."
+ echo " -Runtime"
+ echo " Possible values:"
+ echo " - dotnet - the Microsoft.NETCore.App shared runtime"
+ echo " - aspnetcore - the Microsoft.AspNetCore.App shared runtime"
+ echo " --dry-run,-DryRun Do not perform installation. Display download link."
+ echo " --no-path, -NoPath Do not set PATH for the current process."
+ echo " --verbose,-Verbose Display diagnostics information."
+ echo " --azure-feed,-AzureFeed For internal use only."
+ echo " Allows using a different storage to download SDK archives from."
+ echo " This parameter is only used if --no-cdn is false."
+ echo " --uncached-feed,-UncachedFeed For internal use only."
+ echo " Allows using a different storage to download SDK archives from."
+ echo " This parameter is only used if --no-cdn is true."
+ echo " --skip-non-versioned-files Skips non-versioned files if they already exist, such as the dotnet executable."
+ echo " -SkipNonVersionedFiles"
+ echo " --no-cdn,-NoCdn Disable downloading from the Azure CDN, and use the uncached feed directly."
+ echo " --jsonfile Determines the SDK version from a user specified global.json file."
+ echo " Note: global.json must have a value for 'SDK:Version'"
+ echo " --keep-zip,-KeepZip If set, downloaded file is kept."
+ echo " --zip-path, -ZipPath If set, downloaded file is stored at the specified path."
+ echo " -?,--?,-h,--help,-Help Shows this help message"
+ echo ""
+ echo "Install Location:"
+ echo " Location is chosen in following order:"
+ echo " - --install-dir option"
+ echo " - Environmental variable DOTNET_INSTALL_DIR"
+ echo " - $HOME/.dotnet"
+ exit 0
+ ;;
+ *)
+ say_err "Unknown argument \`$name\`"
+ exit 1
+ ;;
+ esac
+
+ shift
+done
+
+say_verbose "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:"
+say_verbose "- The SDK needs to be installed without user interaction and without admin rights."
+say_verbose "- The SDK installation doesn't need to persist across multiple CI runs."
+say_verbose "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.\n"
+
+# Internal builds require credentials; in dry-run mode this is only a warning
+# so the payload URLs can still be inspected without a token.
+if [ "$internal" = true ] && [ -z "$(echo $feed_credential)" ]; then
+ message="Provide credentials via --feed-credential parameter."
+ if [ "$dry_run" = true ]; then
+ say_warning "$message"
+ else
+ say_err "$message"
+ exit 1
+ fi
+fi
+
+check_min_reqs
+calculate_vars
+# generate_regular_links call below will 'exit' if the determined version is already installed.
+generate_download_links
+
+if [[ "$dry_run" = true ]]; then
+ print_dry_run
+ exit 0
+fi
+
+install_dotnet
+
+# Expose the installed binaries on PATH for the current process (effective
+# only when this script is sourced rather than executed).
+bin_path="$(get_absolute_path "$(combine_paths "$install_root" "$bin_folder_relative_path")")"
+if [ "$no_path" = false ]; then
+ say "Adding to current process PATH: \`$bin_path\`. Note: This change will be visible only when sourcing script."
+ export PATH="$bin_path":"$PATH"
+else
+ say "Binaries of dotnet can be found in $bin_path"
+fi
+
+say "Note that the script does not resolve dependencies during installation."
+say "To check the list of dependencies, go to https://learn.microsoft.com/dotnet/core/install, select your operating system and check the \"Dependencies\" section."
+say "Installation finished successfully."
diff --git a/start-infrastructure.sh b/start-infrastructure.sh
new file mode 100644
index 0000000..4512b05
--- /dev/null
+++ b/start-infrastructure.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+# Set global configuration
+DATA_ROOT="/var/tasker"
+POSTGRES_VERSION="latest"
+MONGODB_VERSION="latest"
+
+# Start PostgreSQL
+docker run -d \
+ --name TASKER_POSTGRES \
+ -v "${DATA_ROOT}/postgres:/var/lib/postgresql/data" \
+ -p 5400:5432 \
+ -e POSTGRES_USER=sa \
+ -e POSTGRES_PASSWORD=P@ssword123! \
+ -e POSTGRES_DB=Tasker \
+ postgres:$POSTGRES_VERSION
+
+# Start MongoDB
+docker run -d \
+ --name TASKER_MONGODB \
+ -v "${DATA_ROOT}/mongodb:/data/db" \
+ -p 5401:27017 \
+ mongo:$MONGODB_VERSION
+