Month: July 2024
Exchange online and MGGraph are interfering
I am creating a new script. The script runs unattended. The script does a couple of things; the important part here is: assigning a license to a new user and setting the new user's mailbox Address Book Policy. The address book change is an Exchange Online task; the license is Graph. I am getting an error.
How to reproduce the error:
1. connect to MgGraph, I am using this command
Connect-MgGraph -TenantId $tenantID -AppId $appID -CertificateThumbprint $CertificateThumbPrint -NoWelcome
do some work, Disconnect-MgGraph
2. Connect to Exchange online, in the same script:
Connect-ExchangeOnline -CertificateThumbPrint $CertificateThumbPrint -AppID $appID -Organization $tenantID -CommandName Get-EXOMailbox,Get-mailbox,Set-mailbox -SkipLoadingCmdletHelp -ShowBanner:$false
The command verbose debug output is this:
DEBUG:
using System;
using System.Net;
using System.Management.Automation;
using Microsoft.Win32.SafeHandles;
using System.Security.Cryptography;
using System.Runtime.InteropServices;
using System.Runtime.ConstrainedExecution;
using System.Runtime.Versioning;
using System.Security;
namespace Microsoft.PowerShell.Commands.PowerShellGet
{
public static class Telemetry
{
public static void TraceMessageArtifactsNotFound(string[] artifactsNotFound, string operationName)
{
Microsoft.PowerShell.Telemetry.Internal.TelemetryAPI.TraceMessage(operationName, new { ArtifactsNotFound = artifactsNotFound });
}
public static void TraceMessageNonPSGalleryRegistration(string sourceLocationType, string sourceLocationHash, string installationPolicy, strin
g packageManagementProvider, string publishLocationHash, string scriptSourceLocationHash, string scriptPublishLocationHash, string operationName)
{
Microsoft.PowerShell.Telemetry.Internal.TelemetryAPI.TraceMessage(operationName, new { SourceLocationType = sourceLocationType, SourceLoca
tionHash = sourceLocationHash, InstallationPolicy = installationPolicy, PackageManagementProvider = packageManagementProvider, PublishLocationHash = p
ublishLocationHash, ScriptSourceLocationHash = scriptSourceLocationHash, ScriptPublishLocationHash = scriptPublishLocationHash });
}
}
/// <summary>
/// Used by Ping-Endpoint function to supply webproxy to HttpClient
/// We cannot use System.Net.WebProxy because this is not available on CoreClr
/// </summary>
public class InternalWebProxy : IWebProxy
{
Uri _proxyUri;
ICredentials _credentials;
public InternalWebProxy(Uri uri, ICredentials credentials)
{
Credentials = credentials;
_proxyUri = uri;
}
/// <summary>
/// Credentials used by WebProxy
/// </summary>
public ICredentials Credentials
{
get
{
return _credentials;
}
set
{
_credentials = value;
}
}
public Uri GetProxy(Uri destination)
{
return _proxyUri;
}
public bool IsBypassed(Uri host)
{
return false;
}
}
[StructLayout(LayoutKind.Sequential, CharSet=CharSet.Unicode)]
public struct CERT_CHAIN_POLICY_PARA {
public CERT_CHAIN_POLICY_PARA(int size) {
cbSize = (uint) size;
dwFlags = 0;
pvExtraPolicyPara = IntPtr.Zero;
}
public uint cbSize;
public uint dwFlags;
public IntPtr pvExtraPolicyPara;
}
[StructLayout(LayoutKind.Sequential, CharSet=CharSet.Unicode)]
public struct CERT_CHAIN_POLICY_STATUS {
public CERT_CHAIN_POLICY_STATUS(int size) {
cbSize = (uint) size;
dwError = 0;
lChainIndex = IntPtr.Zero;
lElementIndex = IntPtr.Zero;
pvExtraPolicyStatus = IntPtr.Zero;
}
public uint cbSize;
public uint dwError;
public IntPtr lChainIndex;
public IntPtr lElementIndex;
public IntPtr pvExtraPolicyStatus;
}
// Internal SafeHandleZeroOrMinusOneIsInvalid class to remove the dependency on .Net Framework 4.6.
public abstract class InternalSafeHandleZeroOrMinusOneIsInvalid : SafeHandle
{
protected InternalSafeHandleZeroOrMinusOneIsInvalid(bool ownsHandle)
: base(IntPtr.Zero, ownsHandle)
{
}
public override bool IsInvalid
{
get
{
return handle == IntPtr.Zero || handle == new IntPtr(-1);
}
}
}
// Internal SafeX509ChainHandle class to remove the dependency on .Net Framework 4.6.
[SecurityCritical]
public sealed class InternalSafeX509ChainHandle : InternalSafeHandleZeroOrMinusOneIsInvalid {
private InternalSafeX509ChainHandle () : base(true) {}
internal InternalSafeX509ChainHandle (IntPtr handle) : base (true) {
SetHandle(handle);
}
internal static InternalSafeX509ChainHandle InvalidHandle {
get { return new InternalSafeX509ChainHandle(IntPtr.Zero); }
}
[SecurityCritical]
override protected bool ReleaseHandle()
{
CertFreeCertificateChain(handle);
return true;
}
[DllImport(“Crypt32.dll”, SetLastError=true)]
[SuppressUnmanagedCodeSecurity,
ResourceExposure(ResourceScope.None),
ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
private static extern void CertFreeCertificateChain(IntPtr handle);
}
public class Win32Helpers
{
[DllImport(“Crypt32.dll”, CharSet=CharSet.Auto, SetLastError=true)]
public extern static
bool CertVerifyCertificateChainPolicy(
[In] IntPtr pszPolicyOID,
[In] SafeX509ChainHandle pChainContext,
[In] ref CERT_CHAIN_POLICY_PARA pPolicyPara,
[In,Out] ref CERT_CHAIN_POLICY_STATUS pPolicyStatus);
[DllImport(“Crypt32.dll”, CharSet=CharSet.Auto, SetLastError=true)]
public static extern
SafeX509ChainHandle CertDuplicateCertificateChain(
[In] IntPtr pChainContext);
[DllImport(“Crypt32.dll”, CharSet=CharSet.Auto, SetLastError=true)]
[ResourceExposure(ResourceScope.None)]
public static extern
SafeX509ChainHandle CertDuplicateCertificateChain(
[In] SafeX509ChainHandle pChainContext);
public static bool IsMicrosoftCertificate([In] SafeX509ChainHandle pChainContext)
{
//————————————————————————-
// CERT_CHAIN_POLICY_MICROSOFT_ROOT
//
// Checks if the last element of the first simple chain contains a
// Microsoft root public key. If it doesn’t contain a Microsoft root
// public key, dwError is set to CERT_E_UNTRUSTEDROOT.
//
// pPolicyPara is optional. However,
// MICROSOFT_ROOT_CERT_CHAIN_POLICY_ENABLE_TEST_ROOT_FLAG can be set in
// the dwFlags in pPolicyPara to also check for the Microsoft Test Roots.
//
// MICROSOFT_ROOT_CERT_CHAIN_POLICY_CHECK_APPLICATION_ROOT_FLAG can be set
// in the dwFlags in pPolicyPara to check for the Microsoft root for
// application signing instead of the Microsoft product root. This flag
// explicitly checks for the application root only and cannot be combined
// with the test root flag.
//
// MICROSOFT_ROOT_CERT_CHAIN_POLICY_DISABLE_FLIGHT_ROOT_FLAG can be set
// in the dwFlags in pPolicyPara to always disable the Flight root.
//
// pvExtraPolicyPara and pvExtraPolicyStatus aren’t used and must be set
// to NULL.
//————————————————————————–
const uint MICROSOFT_ROOT_CERT_CHAIN_POLICY_ENABLE_TEST_ROOT_FLAG = 0x00010000;
const uint MICROSOFT_ROOT_CERT_CHAIN_POLICY_CHECK_APPLICATION_ROOT_FLAG = 0x00020000;
//const uint MICROSOFT_ROOT_CERT_CHAIN_POLICY_DISABLE_FLIGHT_ROOT_FLAG = 0x00040000;
CERT_CHAIN_POLICY_PARA PolicyPara = new CERT_CHAIN_POLICY_PARA(Marshal.SizeOf(typeof(CERT_CHAIN_POLICY_PARA)));
CERT_CHAIN_POLICY_STATUS PolicyStatus = new CERT_CHAIN_POLICY_STATUS(Marshal.SizeOf(typeof(CERT_CHAIN_POLICY_STATUS)));
int CERT_CHAIN_POLICY_MICROSOFT_ROOT = 7;
PolicyPara.dwFlags = (uint) MICROSOFT_ROOT_CERT_CHAIN_POLICY_ENABLE_TEST_ROOT_FLAG;
bool isMicrosoftRoot = false;
if(CertVerifyCertificateChainPolicy(new IntPtr(CERT_CHAIN_POLICY_MICROSOFT_ROOT),
pChainContext,
ref PolicyPara,
ref PolicyStatus))
{
isMicrosoftRoot = (PolicyStatus.dwError == 0);
}
// Also check for the Microsoft root for application signing if the Microsoft product root verification is unsuccessful.
if(!isMicrosoftRoot)
{
// Some Microsoft modules can be signed with Microsoft Application Root instead of Microsoft Product Root,
// So we need to use the MICROSOFT_ROOT_CERT_CHAIN_POLICY_CHECK_APPLICATION_ROOT_FLAG for the certificate verification.
// MICROSOFT_ROOT_CERT_CHAIN_POLICY_CHECK_APPLICATION_ROOT_FLAG can not be used
// with MICROSOFT_ROOT_CERT_CHAIN_POLICY_ENABLE_TEST_ROOT_FLAG,
// so additional CertVerifyCertificateChainPolicy call is required to verify the given certificate is in Microsoft Application Root.
//
CERT_CHAIN_POLICY_PARA PolicyPara2 = new CERT_CHAIN_POLICY_PARA(Marshal.SizeOf(typeof(CERT_CHAIN_POLICY_PARA)));
CERT_CHAIN_POLICY_STATUS PolicyStatus2 = new CERT_CHAIN_POLICY_STATUS(Marshal.SizeOf(typeof(CERT_CHAIN_POLICY_STATUS)));
PolicyPara2.dwFlags = (uint) MICROSOFT_ROOT_CERT_CHAIN_POLICY_CHECK_APPLICATION_ROOT_FLAG;
if(CertVerifyCertificateChainPolicy(new IntPtr(CERT_CHAIN_POLICY_MICROSOFT_ROOT),
pChainContext,
ref PolicyPara2,
ref PolicyStatus2))
{
isMicrosoftRoot = (PolicyStatus2.dwError == 0);
}
}
return isMicrosoftRoot;
}
}
}
IDX12729: Unable to decode the header ‘[PII of type ‘System.String’ is hidden. For more details, see https://aka.ms/IdentityModel/PII.]’ as Base64Url
encoded string.
At C:\Program Files\WindowsPowerShell\Modules\ExchangeOnlineManagement\3.5.1\netFramework\ExchangeOnlineManagement.psm1:762 char:21
+ throw $_.Exception.InnerException;
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ CategoryInfo : OperationStopped: (:) [], ArgumentException
+ FullyQualifiedErrorId : IDX12729: Unable to decode the header ‘[PII of type ‘System.String’ is hidden. For more details, see https://aka.ms/Id
entityModel/PII.]’ as Base64Url encoded string.
I tried:
- updating both modules to the latest version
- removing the Microsoft.Graph and Microsoft.Graph.Authentication modules before connecting to Exchange Online
- clearing the token cache file from %LOCALAPPDATA%\.IdentityService\mg.msal.cache
I would like to avoid running two separate scripts or script isolation such as new processes or jobs, because I need to pass many variables between the two parts, both input and output.
The app I am using and the cert are okay. If I run the Exchange Online part separately it works, so I can connect to Exchange Online with them.
Any idea why this is happening? What should I check?
I am creating a new script. The script runs unattended. The script does a couple of things; the important part here is: assigning a license to a new user and setting the new user's mailbox Address Book Policy. The address book change is an Exchange Online task; the license is Graph. I am getting an error. How to reproduce the error: 1. Connect to MgGraph; I am using this command: Connect-MgGraph -TenantId $tenantID -AppId $appID -CertificateThumbprint $CertificateThumbPrint -NoWelcome — do some work, then Disconnect-MgGraph. 2. Connect to Exchange Online, in the same script: Connect-ExchangeOnline -CertificateThumbPrint $CertificateThumbPrint -AppID $appID -Organization $tenantID -CommandName Get-EXOMailbox,Get-mailbox,Set-mailbox -SkipLoadingCmdletHelp -ShowBanner:$false The command verbose debug output is this: DEBUG:
using System;
using System.Net;
using System.Management.Automation;
using Microsoft.Win32.SafeHandles;
using System.Security.Cryptography;
using System.Runtime.InteropServices;
using System.Runtime.ConstrainedExecution;
using System.Runtime.Versioning;
using System.Security;
namespace Microsoft.PowerShell.Commands.PowerShellGet
{
public static class Telemetry
{
public static void TraceMessageArtifactsNotFound(string[] artifactsNotFound, string operationName)
{
Microsoft.PowerShell.Telemetry.Internal.TelemetryAPI.TraceMessage(operationName, new { ArtifactsNotFound = artifactsNotFound });
}
public static void TraceMessageNonPSGalleryRegistration(string sourceLocationType, string sourceLocationHash, string installationPolicy, strin
g packageManagementProvider, string publishLocationHash, string scriptSourceLocationHash, string scriptPublishLocationHash, string operationName)
{
Microsoft.PowerShell.Telemetry.Internal.TelemetryAPI.TraceMessage(operationName, new { SourceLocationType = sourceLocationType, SourceLoca
tionHash = sourceLocationHash, InstallationPolicy = installationPolicy, PackageManagementProvider = packageManagementProvider, PublishLocationHash = p
ublishLocationHash, ScriptSourceLocationHash = scriptSourceLocationHash, ScriptPublishLocationHash = scriptPublishLocationHash });
}
}
/// <summary>
/// Used by Ping-Endpoint function to supply webproxy to HttpClient
/// We cannot use System.Net.WebProxy because this is not available on CoreClr
/// </summary>
public class InternalWebProxy : IWebProxy
{
Uri _proxyUri;
ICredentials _credentials;
public InternalWebProxy(Uri uri, ICredentials credentials)
{
Credentials = credentials;
_proxyUri = uri;
}
/// <summary>
/// Credentials used by WebProxy
/// </summary>
public ICredentials Credentials
{
get
{
return _credentials;
}
set
{
_credentials = value;
}
}
public Uri GetProxy(Uri destination)
{
return _proxyUri;
}
public bool IsBypassed(Uri host)
{
return false;
}
}
[StructLayout(LayoutKind.Sequential, CharSet=CharSet.Unicode)]
public struct CERT_CHAIN_POLICY_PARA {
public CERT_CHAIN_POLICY_PARA(int size) {
cbSize = (uint) size;
dwFlags = 0;
pvExtraPolicyPara = IntPtr.Zero;
}
public uint cbSize;
public uint dwFlags;
public IntPtr pvExtraPolicyPara;
}
[StructLayout(LayoutKind.Sequential, CharSet=CharSet.Unicode)]
public struct CERT_CHAIN_POLICY_STATUS {
public CERT_CHAIN_POLICY_STATUS(int size) {
cbSize = (uint) size;
dwError = 0;
lChainIndex = IntPtr.Zero;
lElementIndex = IntPtr.Zero;
pvExtraPolicyStatus = IntPtr.Zero;
}
public uint cbSize;
public uint dwError;
public IntPtr lChainIndex;
public IntPtr lElementIndex;
public IntPtr pvExtraPolicyStatus;
}
// Internal SafeHandleZeroOrMinusOneIsInvalid class to remove the dependency on .Net Framework 4.6.
public abstract class InternalSafeHandleZeroOrMinusOneIsInvalid : SafeHandle
{
protected InternalSafeHandleZeroOrMinusOneIsInvalid(bool ownsHandle)
: base(IntPtr.Zero, ownsHandle)
{
}
public override bool IsInvalid
{
get
{
return handle == IntPtr.Zero || handle == new IntPtr(-1);
}
}
}
// Internal SafeX509ChainHandle class to remove the dependency on .Net Framework 4.6.
[SecurityCritical]
public sealed class InternalSafeX509ChainHandle : InternalSafeHandleZeroOrMinusOneIsInvalid {
private InternalSafeX509ChainHandle () : base(true) {}
internal InternalSafeX509ChainHandle (IntPtr handle) : base (true) {
SetHandle(handle);
}
internal static InternalSafeX509ChainHandle InvalidHandle {
get { return new InternalSafeX509ChainHandle(IntPtr.Zero); }
}
[SecurityCritical]
override protected bool ReleaseHandle()
{
CertFreeCertificateChain(handle);
return true;
}
[DllImport(“Crypt32.dll”, SetLastError=true)]
[SuppressUnmanagedCodeSecurity,
ResourceExposure(ResourceScope.None),
ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
private static extern void CertFreeCertificateChain(IntPtr handle);
}
public class Win32Helpers
{
[DllImport(“Crypt32.dll”, CharSet=CharSet.Auto, SetLastError=true)]
public extern static
bool CertVerifyCertificateChainPolicy(
[In] IntPtr pszPolicyOID,
[In] SafeX509ChainHandle pChainContext,
[In] ref CERT_CHAIN_POLICY_PARA pPolicyPara,
[In,Out] ref CERT_CHAIN_POLICY_STATUS pPolicyStatus);
[DllImport(“Crypt32.dll”, CharSet=CharSet.Auto, SetLastError=true)]
public static extern
SafeX509ChainHandle CertDuplicateCertificateChain(
[In] IntPtr pChainContext);
[DllImport(“Crypt32.dll”, CharSet=CharSet.Auto, SetLastError=true)]
[ResourceExposure(ResourceScope.None)]
public static extern
SafeX509ChainHandle CertDuplicateCertificateChain(
[In] SafeX509ChainHandle pChainContext);
public static bool IsMicrosoftCertificate([In] SafeX509ChainHandle pChainContext)
{
//————————————————————————-
// CERT_CHAIN_POLICY_MICROSOFT_ROOT
//
// Checks if the last element of the first simple chain contains a
// Microsoft root public key. If it doesn’t contain a Microsoft root
// public key, dwError is set to CERT_E_UNTRUSTEDROOT.
//
// pPolicyPara is optional. However,
// MICROSOFT_ROOT_CERT_CHAIN_POLICY_ENABLE_TEST_ROOT_FLAG can be set in
// the dwFlags in pPolicyPara to also check for the Microsoft Test Roots.
//
// MICROSOFT_ROOT_CERT_CHAIN_POLICY_CHECK_APPLICATION_ROOT_FLAG can be set
// in the dwFlags in pPolicyPara to check for the Microsoft root for
// application signing instead of the Microsoft product root. This flag
// explicitly checks for the application root only and cannot be combined
// with the test root flag.
//
// MICROSOFT_ROOT_CERT_CHAIN_POLICY_DISABLE_FLIGHT_ROOT_FLAG can be set
// in the dwFlags in pPolicyPara to always disable the Flight root.
//
// pvExtraPolicyPara and pvExtraPolicyStatus aren’t used and must be set
// to NULL.
//————————————————————————–
const uint MICROSOFT_ROOT_CERT_CHAIN_POLICY_ENABLE_TEST_ROOT_FLAG = 0x00010000;
const uint MICROSOFT_ROOT_CERT_CHAIN_POLICY_CHECK_APPLICATION_ROOT_FLAG = 0x00020000;
//const uint MICROSOFT_ROOT_CERT_CHAIN_POLICY_DISABLE_FLIGHT_ROOT_FLAG = 0x00040000;
CERT_CHAIN_POLICY_PARA PolicyPara = new CERT_CHAIN_POLICY_PARA(Marshal.SizeOf(typeof(CERT_CHAIN_POLICY_PARA)));
CERT_CHAIN_POLICY_STATUS PolicyStatus = new CERT_CHAIN_POLICY_STATUS(Marshal.SizeOf(typeof(CERT_CHAIN_POLICY_STATUS)));
int CERT_CHAIN_POLICY_MICROSOFT_ROOT = 7;
PolicyPara.dwFlags = (uint) MICROSOFT_ROOT_CERT_CHAIN_POLICY_ENABLE_TEST_ROOT_FLAG;
bool isMicrosoftRoot = false;
if(CertVerifyCertificateChainPolicy(new IntPtr(CERT_CHAIN_POLICY_MICROSOFT_ROOT),
pChainContext,
ref PolicyPara,
ref PolicyStatus))
{
isMicrosoftRoot = (PolicyStatus.dwError == 0);
}
// Also check for the Microsoft root for application signing if the Microsoft product root verification is unsuccessful.
if(!isMicrosoftRoot)
{
// Some Microsoft modules can be signed with Microsoft Application Root instead of Microsoft Product Root,
// So we need to use the MICROSOFT_ROOT_CERT_CHAIN_POLICY_CHECK_APPLICATION_ROOT_FLAG for the certificate verification.
// MICROSOFT_ROOT_CERT_CHAIN_POLICY_CHECK_APPLICATION_ROOT_FLAG can not be used
// with MICROSOFT_ROOT_CERT_CHAIN_POLICY_ENABLE_TEST_ROOT_FLAG,
// so additional CertVerifyCertificateChainPolicy call is required to verify the given certificate is in Microsoft Application Root.
//
CERT_CHAIN_POLICY_PARA PolicyPara2 = new CERT_CHAIN_POLICY_PARA(Marshal.SizeOf(typeof(CERT_CHAIN_POLICY_PARA)));
CERT_CHAIN_POLICY_STATUS PolicyStatus2 = new CERT_CHAIN_POLICY_STATUS(Marshal.SizeOf(typeof(CERT_CHAIN_POLICY_STATUS)));
PolicyPara2.dwFlags = (uint) MICROSOFT_ROOT_CERT_CHAIN_POLICY_CHECK_APPLICATION_ROOT_FLAG;
if(CertVerifyCertificateChainPolicy(new IntPtr(CERT_CHAIN_POLICY_MICROSOFT_ROOT),
pChainContext,
ref PolicyPara2,
ref PolicyStatus2))
{
isMicrosoftRoot = (PolicyStatus2.dwError == 0);
}
}
return isMicrosoftRoot;
}
}
}
IDX12729: Unable to decode the header ‘[PII of type ‘System.String’ is hidden. For more details, see https://aka.ms/IdentityModel/PII.]’ as Base64Url
encoded string.
At C:\Program Files\WindowsPowerShell\Modules\ExchangeOnlineManagement\3.5.1\netFramework\ExchangeOnlineManagement.psm1:762 char:21
+ throw $_.Exception.InnerException;
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ CategoryInfo : OperationStopped: (:) [], ArgumentException
+ FullyQualifiedErrorId : IDX12729: Unable to decode the header ‘[PII of type ‘System.String’ is hidden. For more details, see https://aka.ms/Id
entityModel/PII.]’ as Base64Url encoded string. I tried: updating both modules to the latest version; removing the Microsoft.Graph and Microsoft.Graph.Authentication modules before connecting to Exchange Online; clearing the token cache file from %LOCALAPPDATA%\.IdentityService\mg.msal.cache. I would like to avoid running two separate scripts or script isolation like new processes or jobs, because I need to pass many variables between the two parts, input and output. The app I am using and the cert are okay. If I run it separately it works, so I can connect to Exchange Online with it. Any idea why this is happening? What should I check? Read More
“Added More RAM, Windows 10 Fails to Boot”
After purchasing the RAM modules from Best Buy, I encountered an issue where my computer would not boot up properly after installation. Despite the fan running, my monitor displayed nothing. My computer has four slots – two white and two black. The initial 8GB RAM module was in the first black slot. I attempted to install two new 8GB RAM modules into the two white slots, but my computer still failed to start. Subsequently, I tried installing just one of the new modules into the remaining black slot, but the issue persisted. Ultimately, I had to remove the new module, and my computer resumed normal operation with only the original module installed.
After purchasing the RAM modules from Best Buy, I encountered an issue where my computer would not boot up properly after installation. Despite the fan running, my monitor displayed nothing. My computer has four slots – two white and two black. The initial 8GB RAM module was in the first black slot. I attempted to install two new 8GB RAM modules into the two white slots, but my computer still failed to start. Subsequently, I tried installing just one of the new modules into the remaining black slot, but the issue persisted. Ultimately, I had to remove the new module, and my computer resumed normal operation with only the original module installed. Read More
“Windows 10 Installation Issues: Seeking Solutions for Frustrating Crashes”
Greetings,
I am encountering difficulties while attempting to reinstall Windows 10, and I am concerned about the potential loss of my data and applications. Furthermore, my current hard drive does not offer sufficient storage capacity to create a backup. Despite my efforts to initiate the Windows 10 setup process, it consistently crashes. I have also tried entering safe mode, which allowed me to progress a bit further; however, I was unable to complete the installation due to system limitations. In an attempt to troubleshoot, I disabled all non-Microsoft services and startup programs using ‘msconfig,’ which enabled me to reach 100% completion during the update in normal mode, only for it to crash afterward.
Greetings, I am encountering difficulties while attempting to reinstall Windows 10, and I am concerned about the potential loss of my data and applications. Furthermore, my current hard drive does not offer sufficient storage capacity to create a backup. Despite my efforts to initiate the Windows 10 setup process, it consistently crashes. I have also tried entering safe mode, which allowed me to progress a bit further; however, I was unable to complete the installation due to system limitations. In an attempt to troubleshoot, I disabled all non-Microsoft services and startup programs using ‘msconfig,’ which enabled me to reach 100% completion during the update in normal mode, only for it to crash afterward. Read More
How to Discover a Subtle Startup Catalyst?
I’m interested in understanding how the OperaGX Browser app initiates upon computer login. Despite examining the user Startup folder, the All Users Startup folder, and searching for a service, I have been unable to determine its startup method. Specifically, I am seeking information within Windows 10 that specifies the app’s activation upon Windows startup or login, rather than a setting within the application. My goal is to prevent its launch using a tool like Group Policy.
I’m interested in understanding how the OperaGX Browser app initiates upon computer login. Despite examining the user Startup folder, the All Users Startup folder, and searching for a service, I have been unable to determine its startup method. Specifically, I am seeking information within Windows 10 that specifies the app’s activation upon Windows startup or login, rather than a setting within the application. My goal is to prevent its launch using a tool like Group Policy. Read More
Simulink WebView export from R2014 not displayable in Chrome?
WebViews exported from R2013B work fine in Chrome for me. WebViews exported from R2014 display a blank screen. Using the debugging console, I see a javascript error:
Uncaught Error: declare: base class is not a callable constructor. webview.js:558
This is not the well-known local file reference problem (obviously, or the webview.js file would not have been loaded). The same exported WebView displays properly in Firefox and IE11. I need this to work in Chrome, as I have a project where the WebView is supposed to be displayed from inside an application using Chromium Embedded Framework. (I’m seeing the same behavior in the normal Desktop Chrome app and in CEF3). Any ideas what is wrong?
Thanks!WebViews exported from R2013B work fine in Chrome for me. WebViews exported from R2014 display a blank screen. Using the debugging console, I see a javascript error:
Uncaught Error: declare: base class is not a callable constructor. webview.js:558
This is not the well-known local file reference problem (obviously, or the webview.js file would not have been loaded). The same exported WebView displays properly in Firefox and IE11. I need this to work in Chrome, as I have a project where the WebView is supposed to be displayed from inside an application using Chromium Embedded Framework. (I’m seeing the same behavior in the normal Desktop Chrome app and in CEF3). Any ideas what is wrong?
Thanks! WebViews exported from R2013B work fine in Chrome for me. WebViews exported from R2014 display a blank screen. Using the debugging console, I see a javascript error:
Uncaught Error: declare: base class is not a callable constructor. webview.js:558
This is not the well-known local file reference problem (obviously, or the webview.js file would not have been loaded). The same exported WebView displays properly in Firefox and IE11. I need this to work in Chrome, as I have a project where the WebView is supposed to be displayed from inside an application using Chromium Embedded Framework. (I’m seeing the same behavior in the normal Desktop Chrome app and in CEF3). Any ideas what is wrong?
Thanks! simulink, webview, chrome MATLAB Answers — New Questions
Calling Matlab Function with Multiple Returns in C++ Semantic Error
Hello, I am running into a type error where I am not able to receive multiple returns from a custom MATLAB function that gives multiple returns. Here are the C++ implementation, the function, and the error message, which is flagged by the IDE as a semantic issue.
C++ CALL:
matlab::data::TypedArray<double> const errArray =
factory.createArray({1,10}, {-5.6355,-5.6188,-5.6022,-5.5856,-5.569,-5.5524,-5.5358,-5.5193,-5.5027,-5.4862});
std::vector<matlab::data::Array> args({errArray});
std::vector<matlab::data::Array> m_results = matlabPtr->feval(u"func",args);
MATLAB FUNCTION:
function [cnt,rng,mn,adq,des] = func(errArr)
matrix = rainflow(errArr);
cnt = matrix(:,1);
rng = matrix(:,2);
mn = matrix(:,3);
adq = 1;
des = 1;
end
ERROR MESSAGE (SEMANTIC ISSUE):
MatlabCall.cpp:80:38: No viable conversion from ‘matlab::data::Array’ to ‘std::vector<matlab::data::Array>’
stl_vector.h:553:7: candidate constructor not viable: no known conversion from ‘matlab::data::Array’ to ‘const vector<Array> &’ for 1st argument
stl_vector.h:572:7: candidate constructor not viable: no known conversion from ‘matlab::data::Array’ to ‘vector<Array> &&’ for 1st argument
stl_vector.h:625:7: candidate constructor not viable: no known conversion from ‘matlab::data::Array’ to ‘initializer_list<value_type>’ (aka ‘initializer_list<matlab::data::Array>’) for 1st argument
stl_vector.h:497:7: explicit constructor is not a candidate
stl_vector.h:510:7: explicit constructor is not a candidate
I am wondering why the data type for m_results is matlab::data::Array despite it being defined as a std::vector<matlab::data::Array>. The MATLAB engine is properly implemented and can run the multi-return example as provided here, https://www.mathworks.com/help/matlab/matlab_external/call-matlab-functions-from-c-1.html#mw_f777193c-c848-49fc-9591-bbb56a40a454; it just seems to have an issue with my custom function.
I have tried the following:
give errArray as the feval argument (produces same error message)
define a size_t numReturned constant as shown in the documentation example (too many arguments error)
Thank you in advance. Hello, I am running into a type error where I am not able to receive multiple returns from a custom MATLAB function that gives multiple returns. Here are the C++ implementation, the function, and the error message, which is flagged by the IDE as a semantic issue.
C++ CALL:
matlab::data::TypedArray<double> const errArray =
factory.createArray({1,10}, {-5.6355,-5.6188,-5.6022,-5.5856,-5.569,-5.5524,-5.5358,-5.5193,-5.5027,-5.4862});
std::vector<matlab::data::Array> args({errArray});
std::vector<matlab::data::Array> m_results = matlabPtr->feval(u"func",args);
MATLAB FUNCTION:
function [cnt,rng,mn,adq,des] = func(errArr)
matrix = rainflow(errArr);
cnt = matrix(:,1);
rng = matrix(:,2);
mn = matrix(:,3);
adq = 1;
des = 1;
end
ERROR MESSAGE (SEMANTIC ISSUE):
MatlabCall.cpp:80:38: No viable conversion from ‘matlab::data::Array’ to ‘std::vector<matlab::data::Array>’
stl_vector.h:553:7: candidate constructor not viable: no known conversion from ‘matlab::data::Array’ to ‘const vector<Array> &’ for 1st argument
stl_vector.h:572:7: candidate constructor not viable: no known conversion from ‘matlab::data::Array’ to ‘vector<Array> &&’ for 1st argument
stl_vector.h:625:7: candidate constructor not viable: no known conversion from ‘matlab::data::Array’ to ‘initializer_list<value_type>’ (aka ‘initializer_list<matlab::data::Array>’) for 1st argument
stl_vector.h:497:7: explicit constructor is not a candidate
stl_vector.h:510:7: explicit constructor is not a candidate
I am wondering why the data type for m_results is matlab::data::array despite it being defined as a std::vector<matlab::data::Array>. The matlab engine is properly implemented and can run the multi-return example as provided here, https://www.mathworks.com/help/matlab/matlab_external/call-matlab-functions-from-c-1.html#mw_f777193c-c848-49fc-9591-bbb56a40a454, it just is seeming to have issue with my custom function.
I have tried the following:
give errArray as the feval argument (produces same error message)
define a size_t numReturned constant as shown in the documentation example (too many arguments error)
Thank you in advance. Hello, I am running into a type error where I am not able to receive multiple returns from a custom MATLAB function that gives multiple returns. Here are the C++ implementation, the function, and the error message, which is flagged by the IDE as a semantic issue.
C++ CALL:
matlab::data::TypedArray<double> const errArray =
factory.createArray({1,10}, {-5.6355,-5.6188,-5.6022,-5.5856,-5.569,-5.5524,-5.5358,-5.5193,-5.5027,-5.4862});
std::vector<matlab::data::Array> args({errArray});
std::vector<matlab::data::Array> m_results = matlabPtr->feval(u"func",args);
MATLAB FUNCTION:
function [cnt,rng,mn,adq,des] = func(errArr)
matrix = rainflow(errArr);
cnt = matrix(:,1);
rng = matrix(:,2);
mn = matrix(:,3);
adq = 1;
des = 1;
end
ERROR MESSAGE (SEMANTIC ISSUE):
MatlabCall.cpp:80:38: No viable conversion from ‘matlab::data::Array’ to ‘std::vector<matlab::data::Array>’
stl_vector.h:553:7: candidate constructor not viable: no known conversion from ‘matlab::data::Array’ to ‘const vector<Array> &’ for 1st argument
stl_vector.h:572:7: candidate constructor not viable: no known conversion from ‘matlab::data::Array’ to ‘vector<Array> &&’ for 1st argument
stl_vector.h:625:7: candidate constructor not viable: no known conversion from ‘matlab::data::Array’ to ‘initializer_list<value_type>’ (aka ‘initializer_list<matlab::data::Array>’) for 1st argument
stl_vector.h:497:7: explicit constructor is not a candidate
stl_vector.h:510:7: explicit constructor is not a candidate
I am wondering why the data type for m_results is matlab::data::array despite it being defined as a std::vector<matlab::data::Array>. The MATLAB engine is properly implemented and can run the multi-return example as provided here, https://www.mathworks.com/help/matlab/matlab_external/call-matlab-functions-from-c-1.html#mw_f777193c-c848-49fc-9591-bbb56a40a454; it just seems to have an issue with my custom function.
I have tried the following:
give errArray as the feval argument (produces same error message)
define a size_t numReturned constant as shown in the documentation example (too many arguments error)
Thank you in advance. c++, matlab engine, function, matlab engine api MATLAB Answers — New Questions
Simulink Library Browser Icons
Some icons in the Simulink Object Browser are not the correct icons. When the incorrect icon is dragged into a new model window it changes into the correct icon. Specifically the buffer icon in DSP System Toolbox>System Management>Buffers. The icon in the library window shows an icon containing the text ‘-40 db’ but when it is dragged into a new model window it changes into a buffer icon.Some icons in the Simulink Object Browser are not the correct icons. When the incorrect icon is dragged into a new model window it changes into the correct icon. Specifically the buffer icon in DSP System Toolbox>System Management>Buffers. The icon in the library window shows an icon containing the text ‘-40 db’ but when it is dragged into a new model window it changes into a buffer icon. Some icons in the Simulink Object Browser are not the correct icons. When the incorrect icon is dragged into a new model window it changes into the correct icon. Specifically the buffer icon in DSP System Toolbox>System Management>Buffers. The icon in the library window shows an icon containing the text ‘-40 db’ but when it is dragged into a new model window it changes into a buffer icon. simulink MATLAB Answers — New Questions
How to configure Upload data button in App design
Designing an app, I dragged a button; I want to configure this button to upload raw data. Designing an app, I dragged a button; I want to configure this button to upload raw data. Designing an app, I dragged a button; I want to configure this button to upload raw data. appdesigner MATLAB Answers — New Questions
“Unpublish” button not showing on “Site Pages” library where “Approve/Reject” Flow configured
Hi,
I have SharePoint “Site Pages” Library where Site Pages or News Posts are getting created. I have configured “Power Automate Flow” for Approval/Reject for selected item/page. Earlier I used to get “Unpublish” button on clicking on “More” option for selected item. But recently I am not getting this option instead getting as “Send for Approval or Cancel Approval”.
How can I enable the “Unpublish” button again while keeping Power Automate intact? Or is this the default behavior in SharePoint Online? If so, is there any documentation available?
Regards,
Pratik
Hi, I have SharePoint “Site Pages” Library where Site Pages or News Posts are getting created. I have configured “Power Automate Flow” for Approval/Reject for selected item/page. Earlier I used to get “Unpublish” button on clicking on “More” option for selected item. But recently I am not getting this option instead getting as “Send for Approval or Cancel Approval”. How can I enable “Unpublish” button again keeping Power Automate intact ? Or is this default behavior of functionality in SharePoint Online If so is there any documentation available ? Regards,Pratik Read More
Replace partial text string from column based on value
I need to replace the value of column A with a value depending on part of the content of the cell in column A.
So let’s say…
A2 = “Company-a2”, I want to replace only the ‘a’ with a ‘c’, making the result “Company-c2” BUT if…
A2 = “Company-b2”, I want to replace only the ‘b’ with a ‘d’, making the result “Company-d2”
Pulling my hair out with this one 🤪
I need to replace the value of column A with a value depending on part of the content of the cell in column A.So let’s say…A2 = “Company-a2”, I want to replace only the ‘a’ with a ‘c’, making the result “Company-c2” BUT if…A2 = “Company-b2”, I want to replace only the ‘b’ with a ‘d’, making the result “Company-d2″Pulling my hair out with this one 🤪 Read More
Fabric API: Update user scopes
Context: The token I generated was retrieved by logging in with ‘Login-PowerBI’, followed by “Get-PowerBiAccessToken -asstring” inside of Powershell. This token was then copied and used inside of postman for the authorization. I didn’t use any extra parameters. Since the current usage of a Service Principal is quite limited for the Fabric API, we’re opting to use the personal bearer token.
Scenario: At our company we’re trying to experiment a bit with the MS Fabric API (https://api.fabric.microsoft.com). We’ve been able to use the API to list the workspaces, items, … basically most standard get calls you could imagine.
We are able to create workspaces via my individual token, but I’m unable to create individual items inside of a workspace. This is due to the fact that I don’t have any individual item level scopes assigned to me for now. My current scopes are: “App.Read.All Capacity.Read.All Capacity.ReadWrite.All Content.Create Dashboard.Read.All Dashboard.ReadWrite.All Dataflow.Read.All Dataflow.ReadWrite.All Dataset.Read.All Dataset.ReadWrite.All Gateway.Read.All Gateway.ReadWrite.All Pipeline.Deploy Pipeline.Read.All Pipeline.ReadWrite.All Report.Read.All Report.ReadWrite.All StorageAccount.Read.All StorageAccount.ReadWrite.All Tenant.Read.All Tenant.ReadWrite.All UserState.ReadWrite.All Workspace.Read.All Workspace.ReadWrite.All”
As you can see, it’s quite normal that I’m only able to create a Workspace. But I want to be able to assign e.g. “Notebook.ReadWrite.All” to my user. How do I do this for an individual user? I’m trying to automate as much as possible using Powershell scripts, but the current scopes are quite limited.
Goal: Using a personal bearer token (not generated through a service principal) to create a notebook item. Endpoint: https://learn.microsoft.com/en-us/rest/api/fabric/core/items/create-item?tabs=HTTP
Question: Is it possible to do this without the usage of a service principal? I know you can use the copy(PowerBIAccessToken) inside of the dev-tools in your browser, but I want to do it more automatically.
Hopefully this is the right section to ask such things, and thank you in advance for your help!
Context: The token I generated was retrieved by logging in with ‘Login-PowerBI’, followed by “Get-PowerBiAccessToken -asstring” inside of Powershell. This token was then copied and used inside of postman for the authorization. I didn’t use any extra parameters. Since the current usage of a Service Principal is quite limited for the Fabric API, we’re opting to use the personal bearer token.Scenario: At our company we’re trying to experiment a bit with the MS Fabric API (https://api.fabric.microsoft.com). We’ve been able to use the API to list the workspaces, items, … basically most standard get calls you could imagine. We are able to create workspaces via my individual token, but I’m unable to create individual items inside of a workspace. This is due to the fact that I don’t have any individual item level scopes assigned to me for now. My current scopes are: “App.Read.All Capacity.Read.All Capacity.ReadWrite.All Content.Create Dashboard.Read.All Dashboard.ReadWrite.All Dataflow.Read.All Dataflow.ReadWrite.All Dataset.Read.All Dataset.ReadWrite.All Gateway.Read.All Gateway.ReadWrite.All Pipeline.Deploy Pipeline.Read.All Pipeline.ReadWrite.All Report.Read.All Report.ReadWrite.All StorageAccount.Read.All StorageAccount.ReadWrite.All Tenant.Read.All Tenant.ReadWrite.All UserState.ReadWrite.All Workspace.Read.All Workspace.ReadWrite.All”As you can see, it’s quite normal that I’m only able to create a Workspace. But I want to be able to assign e.g. “Notebook.ReadWrite.All” to my user. How do I do this for an individual user? I’m trying to automate as much as possible using Powershell scripts, but the current scopes are quite limited.Goal: Using a personal bearer token (not generated through a service principal) to create a notebook item. Endpoint: https://learn.microsoft.com/en-us/rest/api/fabric/core/items/create-item?tabs=HTTP Question: Is it possible to do this without the usage of a service principal? 
I know you can use the copy(PowerBIAccessToken) inside of the dev-tools in your browser, but I want to do it more automatically.Hopefully this is the right section to ask such things, and thank you in advance for your help! Read More
Azure Blogs – Articles from 15-July-2024 to 21-July-2024
AI + Machine Learning
Covering: Anomaly Detector, Azure Bot Services, Azure Cognitive Search, Azure ML, Azure Open Datasets, Azure Cognitive Services, Azure Video Indexer, Computer Vision, Content Moderator, Custom Vision, Data Science VM, Face API, Azure Form Recognizer, Azure Immersive Reader, Kinect DK, Language Understanding (LUIS), Microsoft Genomics, Personalizer, Project Bonsai, QnA Maker, Speaker recognition, Speech to Text, Speech translation, Cognitive Service for Language, Text to Speech, Translator, Azure Metrics Advisor, Health Bot, Azure Percept, Azure Applied AI Services, Azure OpenAI Service
Use WebGPU + ONNX Runtime Web + Transformer.js to build RAG applications by Phi-3-mini
Leveraging phi-3 for an Enhanced Semantic Cache in RAG Applications
From Paper to Pixels: Azure AI in Historical Document Digitization and Translation
Enhancing Document Extraction with Azure AI Document Intelligence and LangChain for RAG Workflows.
New Video Course: Generative AI for Beginners
Fine-Tune and Integrate Custom Phi-3 Models with Prompt Flow in Azure AI Studio
Connect with Application Insights in ‘not Local auth mode’ using OpenTelemetry
Tracing LangChain Code on Azure with OpenTelemetry and Application Insights
Responsible AI Innovation From Principles to Practice: Developer Resources
Using App Spaces to build a chat agent with OpenAI’s Node.js SDK
Save Big on Hosting Your Fine-Tuned Models on Azure OpenAI Service
Running Phi-3-vision via ONNX on Jetson Platform
Announcing Custom Categories Public Preview in Azure AI Content Safety
Pre-Job Health Checks on AKS: A Guide to Stable AI Workloads
Why? and How to Ground a Large Language Models using your Data? (RAG)
Build Powerful RAG Apps Without Code Using LangFlow and Azure OpenAI
Fine-Tune and Integrate Custom Phi-3 Models with Prompt Flow: Step-by-Step Guide
Ready, Set, AI: What our People Science research tells us about AI Readiness
Exploring the Advanced RAG (Retrieval Augmented Generation) Service
Analytics
Covering: Azure Analysis Services, Azure Data Explorer, Azure Data Factory, Azure Data Lake Storage, Azure Data Share, Azure Databricks, Azure Stream Analytics, Azure Synapse Analytics, Data Catalog, Data Lake Analytics, HDInsight, Power BI Embedded, R Server for HDInsight, Microsoft Purview, Microsoft Graph Data Connect, Azure Chaos Studio
Microsoft Power BI and Microsoft Defender for Cloud
Partner Case Study Series | Exasol With Azure and Power BI
Microsoft Purview integrates with ChatGPT Enterprise Compliance API to support compliance
Microsoft Purview Data Governance will be generally available September 1, 2024
Compute
Covering: Azure CycleCloud, Azure Quantum, Azure Spot Virtual Machines, Azure VMware Solution, Batch, Linux Virtual Machines, Virtual Machine Scale Sets, Virtual Machines, Azure Dedicated Host, Azure VM Image Builder, Azure Functions, Service Fabric
Announcing Preview of New Azure Dlsv6, Dsv6, Esv6 VMs with new CPU, Azure Boost, and NVMe Support
Converting Azure Virtual Machines running Windows from SCSI to NVMe
Azure Capacity Reservations with Automatic Consumption
Recovery options for Azure Virtual Machines (VM) affected by CrowdStrike Falcon agent
New Recovery Tool to help with CrowdStrike issue impacting Windows endpoints
Azure Update Manager to support CIS hardened images among other images
Containers
Covering: Azure Kubernetes Service (AKS), Azure Red Hat OpenShift, Azure Container Apps, Web App for Containers, Azure Container Instances, Azure Container Registry
Microsoft Copilot in Azure Series – AKS Cluster configuration and management
Azure Capacity Reservations with Automatic Consumption
Pre-Job Health Checks on AKS: A Guide to Stable AI Workloads
Comparing feature sets for AKS enabled by Azure Arc deployment options
Databases
Covering: Azure Cache for Redis, Azure Cosmos DB, Azure Database for MariaDB, Azure Database for MySQL, Azure Database for PostgreSQL, Azure SQL, Azure SQL Database, Azure SQL Edge, Azure SQL Managed Instance, SQL Server on Azure VM, Table Storage, Azure Managed Instance for Apache Cassandra, Azure Confidential Ledger
Effectively troubleshoot latency in SQL Server Transactional replication: Part 1
Effectively troubleshoot latency in SQL Server Transactional replication: Part 2
Three New Capabilities to Modernize your SQL Server Anywhere with Azure Arc | Data Exposed
101 of Troubleshooting SQL Server on Linux
Connect Azure SQL Server via User Assigned Managed Identity under Django
Oracle Database@Azure Achieves Extensive Certifications: Elevating Security, Reliability, and Trust
Migrating SLURM Job Accounting from Azure Database for MariaDB to MySQL Flexible Server
Developer Tools
Covering: App Configuration, Azure DevTest Labs, Azure Lab Services, SDKs, Visual Studio, Visual Studio Code, Azure Load Testing
.NET 9 Preview 6 is now available!
Introducing Learn Cloud: A VS Code Extension to simplify your First Deployment to the Cloud.
Learn how to build Python Web Apps from our 6-part series!
[Mitigated] Azure Lab Services – Lab Plan Outage
.NET 6 will reach End of Support on November 12, 2024
Introducing CoreWCF and WCF Client Azure Queue Storage bindings for .NET
Mastering your cloud journey: Essentials to Innovating, Migrating and Modernizing, on Azure
Azure Lab Services – Maintenance update outage
[Mitigated] Azure Lab Services – Maintenance update outage
DevOps
Covering: Azure Artifacts, Azure Boards, Azure DevOps, Azure Pipelines, Azure Repos, Azure Test Plans, DevOps tool integrations, Azure Load Testing
DevOps Conference Returns: Global DevOps Experience
Study guide: GitHub Actions certification
Limiting access to reorder Azure DevOps backlog
Hybrid
Covering: Microsoft Azure Stack, Azure Arc
Comparing feature sets for AKS enabled by Azure Arc deployment options
Azure Monitor: How To Get Alerts for Disconnected Arc Agents
Identity
Covering: Azure Active Directory, Multi-factor Authentication, Azure Active Directory Domain Services, Azure Active Directory External Identities
No New Articles
Integration
Covering: API Management, Event Grid, Logic Apps , Service Bus
The Rising Significance of APIs – Azure API Management & API Center
Announcing API Management and API Center Community Live Stream on Thursday, July 25th
Configuring a Disaster Recovery Solution for Azure Service Bus with Basic Tier
Step by step Guidance on Logic App Standard Load Testing and Optimization
Internet Of Things
Covering: Azure IoT Central, Azure IoT Edge, Azure IoT Hub, Azure RTOS, Azure Sphere, Azure Stream Analytics, Azure Time Series Insights, Microsoft Defender for IoT, Azure Percept, Windows for IoT
No New Articles
Management and Governance
Covering: Automation, Azure Advisor, Azure Backup, Azure Blueprints, Azure Lighthouse, Azure Monitor, Azure Policy, Azure Resource Manager, Azure Service Health, Azure Site Recovery, Cloud Shell, Cost Management, Azure Portal, Network Watcher, Azure Automanage, Azure Resource Mover, Azure Chaos Studio, Azure Managed Grafana
Azure Capacity Reservations with Automatic Consumption
FabSoft, Just Software, and Volo offer transactable partner solutions in Azure Marketplace
Mastering your cloud journey: Essentials to Innovating, Migrating and Modernizing, on Azure
Azure Landing Zones Accelerators for Bicep and Terraform. Announcing General Availability!
Azure Monitor: How To Get Alerts for Disconnected Arc Agents
Empowering accessibility and innovation through cloud-based telemedicine with Microsoft Azure
Migration
Covering: Azure Database Migration Service, Azure Migrate, Data Box, Azure Site Recovery
Mastering your cloud journey: Essentials to Innovating, Migrating and Modernizing, on Azure
Mixed Reality
Covering: Digital Twins, Kinect DK, Spatial Anchors, Remote Rendering, Object Anchors
No New Articles
Mobile
Covering: Azure Maps, MAUI, Notification Hubs, Visual Studio App Center, Xamarin, Azure Communication Services
Learn Live Series – Crie uma LOB com OpenAI, Azure Communication Services e MS Graph (Parte II)
Enable location analytics with Azure Maps
Networking
Covering: Application Gateway, Bastion, DDoS Protection, DNS, Azure ExpressRoute, Azure Firewall, Load Balancer, Firewall Manager, Front Door, Internet Analyzer, Azure Private Link, Content Delivery Network, Network Watcher, Traffic Manager, Virtual Network, Virtual WAN, VPN Gateway, Web Application Firewall, Azure Orbital, Route Server, Network Function Manager, Virtual Network Manager, Azure Private 5G Core
Azure Virtual WAN configuration best practices
Portal extension for Azure Firewall with DDoS protection
VPN Gateway – BGP AS PATH – Steer which VPN tunnel traffic will flow from Azure to OnPrem
Use cases of Advanced Network Observability for your Azure Kubernetes Service clusters
Security
Covering: Defender for Cloud, DDoS Protection, Dedicated HSM, Azure Information Protection, Microsoft Sentinel, Key Vault, Microsoft Defender for Cloud, Microsoft Defender for IoT, Microsoft Azure Attestation, Azure Confidential Ledger
Make OT security a core part of your SOC strategy with Microsoft Defender XDR
Microsoft Power BI and Microsoft Defender for Cloud
Microsoft Defender for Cloud PoC Series – Microsoft Defender for APIs
Storage
Covering: Archive Storage, Avere vFXT for Azure, Azure Data Lake Storage, Azure Data Share, Files, FXT Edge Filer, HPC Cache, NetApp Files, Blob Storage, Data Box, Disk Storage, Queue Storage, Storage Accounts, Storage Explorer, StorSimple
Introducing CoreWCF and WCF Client Azure Queue Storage bindings for .NET
Web
Covering: App Configuration, App Service, Azure Cognitive Search, Azure Maps, Azure SignalR Service, Static Web Apps, Azure Communication Services, Azure Web PubSub, Azure Fluid Relay, Web App for Containers
Introducing Learn Cloud: A VS Code Extension to simplify your First Deployment to the Cloud.
Azure Virtual Desktop
Covering: Windows Virtual Desktop, VMware Horizon Cloud on Microsoft Azure, Citrix Virtual Apps and Desktops for Azure
No New Articles
The Importance of Validation HostPools in AVD Deployments: Lessons from the CrowdStrike Global Issue
AI + Machine Learning
Covering: Anomaly Detector, Azure Bot Services, Azure Cognitive Search, Azure ML, Azure Open Datasets, Azure Cognitive Services, Azure Video Indexer, Computer Vision, Content Moderator, Custom Vision, Data Science VM, Face API, Azure Form Recognizer, Azure Immersive Reader, Kinect DK, Language Understanding (LUIS), Microsoft Genomics, Personalizer, Project Bonsai, QnA Maker, Speaker recognition, Speech to Text, Speech translation, Cognitive Service for Language, Text to Speech, Translator, Azure Metrics Advisor, Health Bot, Azure Percept, Azure Applied AI Services, Azure OpenAI Service
Use WebGPU + ONNX Runtime Web + Transformer.js to build RAG applications by Phi-3-mini
Leveraging phi-3 for an Enhanced Semantic Cache in RAG Applications
From Paper to Pixels: Azure AI in Historical Document Digitization and Translation
Enhancing Document Extraction with Azure AI Document Intelligence and LangChain for RAG Workflows.
New Video Course: Generative AI for Beginners
Fine-Tune and Integrate Custom Phi-3 Models with Prompt Flow in Azure AI Studio
Connect with Application Insights in ‘not Local auth mode’ using OpenTelemetry
Tracing LangChain Code on Azure with OpenTelemetry and Application Insights
Responsible AI Innovation From Principles to Practice: Developer Resources
Using App Spaces to build a chat agent with OpenAI’s Node.js SDK
Save Big on Hosting Your Fine-Tuned Models on Azure OpenAI Service
Running Phi-3-vision via ONNX on Jetson Platform
Announcing Custom Categories Public Preview in Azure AI Content Safety
Pre-Job Health Checks on AKS: A Guide to Stable AI Workloads
Why? and How to Ground a Large Language Models using your Data? (RAG)
Build Powerful RAG Apps Without Code Using LangFlow and Azure OpenAI
Fine-Tune and Integrate Custom Phi-3 Models with Prompt Flow: Step-by-Step Guide
Ready, Set, AI: What our People Science research tells us about AI Readiness
Exploring the Advanced RAG (Retrieval Augmented Generation) Service
AI on the road: Azure OpenAI Service helps drive better decision making for the transportation sector
Analytics
Covering: Azure Analysis Services, Azure Data Explorer, Azure Data Factory, Azure Data Lake Storage, Azure Data Share, Azure Databricks, Azure Stream Analytics, Azure Synapse Analytics, Data Catalog, Data Lake Analytics, HDInsight, Power BI Embedded, R Server for HDInsight, Microsoft Purview, Microsoft Graph Data Connect, Azure Chaos Studio
Microsoft Power BI and Microsoft Defender for Cloud
Partner Case Study Series | Exasol With Azure and Power BI
Microsoft Purview integrates with ChatGPT Enterprise Compliance API to support compliance
Microsoft Purview Data Governance will be generally available September 1, 2024
Compute
Covering: Azure CycleCloud, Azure Quantum, Azure Spot Virtual Machines, Azure VMware Solution, Batch, Linux Virtual Machines, Virtual Machine Scale Sets, Virtual Machines, Azure Dedicated Host, Azure VM Image Builder, Azure Functions, Service Fabric
Announcing Preview of New Azure Dlsv6, Dsv6, Esv6 VMs with new CPU, Azure Boost, and NVMe Support
Converting Azure Virtual Machines running Windows from SCSI to NVMe
Azure Capacity Reservations with Automatic Consumption
Recovery options for Azure Virtual Machines (VM) affected by CrowdStrike Falcon agent
New Recovery Tool to help with CrowdStrike issue impacting Windows endpoints
Azure Update Manager to support CIS hardened images among other images
Containers
Covering: Azure Kubernetes Service (AKS), Azure Red Hat OpenShift, Azure Container Apps, Web App for Containers, Azure Container Instances, Azure Container Registry
Microsoft Copilot in Azure Series – AKS Cluster configuration and management
Azure Capacity Reservations with Automatic Consumption
Pre-Job Health Checks on AKS: A Guide to Stable AI Workloads
Comparing feature sets for AKS enabled by Azure Arc deployment options
Use cases of Advanced Network Observability for your Azure Kubernetes Service clusters
Databases
Covering: Azure Cache for Redis, Azure Cosmos DB, Azure Database for MariaDB, Azure Database for MySQL, Azure Database for PostgreSQL, Azure SQL, Azure SQL Database, Azure SQL Edge, Azure SQL Managed Instance, SQL Server on Azure VM, Table Storage, Azure Managed Instance for Apache Cassandra, Azure Confidential Ledger
Effectively troubleshoot latency in SQL Server Transactional replication: Part 1
Effectively troubleshoot latency in SQL Server Transactional replication: Part 2
Three New Capabilities to Modernize your SQL Server Anywhere with Azure Arc | Data Exposed
101 of Troubleshooting SQL Server on Linux
Connect Azure SQL Server via User Assigned Managed Identity under Django
Oracle Database@Azure Achieves Extensive Certifications: Elevating Security, Reliability, and Trust
Migrating SLURM Job Accounting from Azure Database for MariaDB to MySQL Flexible Server
Developer Tools
Covering: App Configuration, Azure DevTest Labs, Azure Lab Services, SDKs, Visual Studio, Visual Studio Code, Azure Load Testing
.NET 9 Preview 6 is now available!
Introducing Learn Cloud: A VS Code Extension to simplify your First Deployment to the Cloud.
Learn how to build Python Web Apps from our 6-part series!
[Mitigated] Azure Lab Services – Lab Plan Outage
.NET 6 will reach End of Support on November 12, 2024
Introducing CoreWCF and WCF Client Azure Queue Storage bindings for .NET
Mastering your cloud journey: Essentials to Innovating, Migrating and Modernizing, on Azure
Azure Lab Services – Maintenance update outage
[Mitigated] Azure Lab Services – Maintenance update outage
DevOps
Covering: Azure Artifacts, Azure Boards, Azure DevOps, Azure Pipelines, Azure Repos, Azure Test Plans, DevOps tool integrations, Azure Load Testing
DevOps Conference Returns: Global DevOps Experience
Study guide: GitHub Actions certification
Limiting access to reorder Azure DevOps backlog
Hybrid
Covering: Microsoft Azure Stack, Azure Arc
Comparing feature sets for AKS enabled by Azure Arc deployment options
Azure Monitor: How To Get Alerts for Disconnected Arc Agents
Three New Capabilities to Modernize your SQL Server Anywhere with Azure Arc | Data Exposed
Identity
Covering: Azure Active Directory, Multi-factor Authentication, Azure Active Directory Domain Services, Azure Active Directory External Identities
No New Articles
Integration
Covering: API Management, Event Grid, Logic Apps , Service Bus
The Rising Significance of APIs – Azure API Management & API Center
Announcing API Management and API Center Community Live Stream on Thursday, July 25th
Configuring a Disaster Recovery Solution for Azure Service Bus with Basic Tier
Step by step Guidance on Logic App Standard Load Testing and Optimization
Internet Of Things
Covering: Azure IoT Central, Azure IoT Edge, Azure IoT Hub, Azure RTOS, Azure Sphere, Azure Stream Analytics, Azure Time Series Insights, Microsoft Defender for IoT, Azure Percept, Windows for IoT
No New Articles
Management and Governance
Covering: Automation, Azure Advisor, Azure Backup, Azure Blueprints, Azure Lighthouse, Azure Monitor, Azure Policy, Azure Resource Manager, Azure Service Health, Azure Site Recovery, Cloud Shell, Cost Management, Azure Portal, Network Watcher, Azure Automanage, Azure Resource Mover, Azure Chaos Studio, Azure Managed Grafana
Azure Capacity Reservations with Automatic Consumption
FabSoft, Just Software, and Volo offer transactable partner solutions in Azure Marketplace
Mastering your cloud journey: Essentials to Innovating, Migrating and Modernizing, on Azure
Azure Landing Zones Accelerators for Bicep and Terraform. Announcing General Availability!
Azure Monitor: How To Get Alerts for Disconnected Arc Agents
Empowering accessibility and innovation through cloud-based telemedicine with Microsoft Azure
Migration
Covering: Azure Database Migration Service, Azure Migrate, Data Box, Azure Site Recovery
Mastering your cloud journey: Essentials to Innovating, Migrating and Modernizing, on Azure
Mixed Reality
Covering: Digital Twins, Kinect DK, Spatial Anchors, Remote Rendering, Object Anchors
No New Articles
Mobile
Covering: Azure Maps, MAUI, Notification Hubs, Visual Studio App Center, Xamarin, Azure Communication Services
Learn Live Series – Crie uma LOB com OpenAI, Azure Communication Services e MS Graph (Parte II)
Enable location analytics with Azure Maps
Networking
Covering: Application Gateway, Bastion, DDoS Protection, DNS, Azure ExpressRoute, Azure Firewall, Load Balancer, Firewall Manager, Front Door, Internet Analyzer, Azure Private Link, Content Delivery Network, Network Watcher, Traffic Manager, Virtual Network, Virtual WAN, VPN Gateway, Web Application Firewall, Azure Orbital, Route Server, Network Function Manager, Virtual Network Manager, Azure Private 5G Core
Azure Virtual WAN configuration best practices
Portal extension for Azure Firewall with DDoS protection
VPN Gateway – BGP AS PATH – Steer which VPN tunnel traffic will flow from Azure to OnPrem
Use cases of Advanced Network Observability for your Azure Kubernetes Service clusters
Security
Covering: Defender for Cloud, DDoS Protection, Dedicated HSM, Azure Information Protection, Microsoft Sentinel, Key Vault, Microsoft Defender for Cloud, Microsoft Defender for IoT, Microsoft Azure Attestation, Azure Confidential Ledger
Make OT security a core part of your SOC strategy with Microsoft Defender XDR
Microsoft Power BI and Microsoft Defender for Cloud
Microsoft Defender for Cloud PoC Series – Microsoft Defender for APIs
Storage
Covering: Archive Storage, Avere vFXT for Azure, Azure Data Lake Storage, Azure Data Share, Files, FXT Edge Filer, HPC Cache, NetApp Files, Blob Storage, Data Box, Disk Storage, Queue Storage, Storage Accounts, Storage Explorer, StorSimple
Introducing CoreWCF and WCF Client Azure Queue Storage bindings for .NET
Web
Covering: App Configuration, App Service, Azure Cognitive Search, Azure Maps, Azure SignalR Service, Static Web Apps, Azure Communication Services, Azure Web PubSub, Azure Fluid Relay, Web App for Containers
Introducing Learn Cloud: A VS Code Extension to simplify your First Deployment to the Cloud.
Azure Virtual Desktop
Covering: Windows Virtual Desktop, VMware Horizon Cloud on Microsoft Azure, Citrix Virtual Apps and Desktops for Azure
No New Articles
The Importance of Validation HostPools in AVD Deployments: Lessons from the CrowdStrike Global Issue Read More
Wie kann ich Videos von YouTube herunterladen?
Hallo zusammen, ich habe ein Problem und hoffe, jemand kann mir helfen. Ich möchte gerne Videos von YouTube herunterladen, um sie offline zu sehen, aber ich finde keine gute Lösung. Ich habe es bereits mit einigen Online-Downloadern versucht, aber die waren voller Werbung und die Download-Geschwindigkeit war extrem langsam. Hat jemand von euch eine Empfehlung für eine zuverlässige Methode, um YouTube-Videos herunterzuladen? Vielleicht eine App oder ein Programm, das ohne diese Probleme funktioniert? Vielen Dank im Voraus für eure Tipps!
Hallo zusammen, ich habe ein Problem und hoffe, jemand kann mir helfen. Ich möchte gerne Videos von YouTube herunterladen, um sie offline zu sehen, aber ich finde keine gute Lösung. Ich habe es bereits mit einigen Online-Downloadern versucht, aber die waren voller Werbung und die Download-Geschwindigkeit war extrem langsam. Hat jemand von euch eine Empfehlung für eine zuverlässige Methode, um YouTube-Videos herunterzuladen? Vielleicht eine App oder ein Programm, das ohne diese Probleme funktioniert? Vielen Dank im Voraus für eure Tipps! Read More
Unable to Retrieve All Internal Lists Using “GetListCollection” Method in SharePoint 2019
When working with SharePoint on-premises Server 2019, I am encountering issues with the SOAP service. Specifically, I am unable to fetch all internal lists using the “GetListCollection” method. Is there another endpoint to retrieve a comprehensive list of all lists? Please refer to the details below:
Language: Java 17. Permissions and Access Rights: Not a concern in this case. Response Status: 200 (No errors encountered).
Despite receiving a successful response, several lists/tables are not being retrieved. The missing lists/tables include:
Attachments, FileVersions, GetValidTerms, Groups, Lists, Permissions, Roles, Subsites, Users, Views
I am aware that SharePoint utilizes a dedicated SQL Server database to store its data. Are the aforementioned lists/tables stored there, and can these lists be accessed separately from the server?
Any guidance or suggestions would be greatly appreciated.
Thank you.
When working with SharePoint on-premises Server 2019, I am encountering issues with the SOAP service. Specifically, I am unable to fetch all internal lists using the “GetListCollection” method. Is there another endpoint to retrieve a comprehensive list of all lists? Please refer to the details below:Language: Java 17Permissions and Access Rights: Not a concern in this case.Response Status: 200 (No errors encountered).Despite receiving a successful response, several lists/tables are not being retrieved. The missing lists/tables include:AttachmentsFileVersionsGetValidTermsGroupsListsPermissionsRolesSubsitesUsersViewsI am aware that SharePoint utilizes a dedicated SQL Server database to store its data. Are the aforementioned lists/ tables stored there, and can these lists be accessed separately from the server?Any guidance or suggestions would be greatly appreciated.Thank you. Read More
Audit of emails previewed, opened, or downloaded in Defender Explorer
Hi,
I’m struggling to audit which emails have been accessed (preview, open, download) by non-owners in Defender -> Email & Collaboration -> Explorer
Global config:
UnifiedAuditLogIngestionEnabled : True
User config:
AuditEnabled : True
AuditAdmin includes “MailItemsAccessed”
Not able to find any email access records by using powershell, Purview or Sentinel (Office 365 data connector is active)
CloudAppEvents | where ActionType == “AdminMailAccess” does not work anymore. I heard that it was working some time ago.
A bug or a feature? Any ideas on what I’m missing?
Hi, I’m struggling to audit which emails have been accessed (preview, open, download) by non-owners in Defender -> Email & Collaboration -> Explorer Global config:UnifiedAuditLogIngestionEnabled : TrueUser config:AuditEnabled : TrueAuditAdmin includes “MailItemsAccessed”Not able to find any email access records by using powershell, Purview or Sentinel (Office 365 data connector is active) CloudAppEvents | where ActionType == “AdminMailAccess” does not work anymore. I heard that it was working some time ago. A bug or a feature? Any ideas on what I’m missing? Read More
Unlocking the Power of Your Data: Build Smarter Apps with AI
Hey everyone, I’m Shivam Goyal, a Microsoft Learn Student Ambassador, fascinated by the incredible world of large language models (LLMs) and tools like LlamaIndex. It’s mind-blowing how these technologies can unlock the power of information, helping us find answers and solve complex problems more efficiently than ever before. I’m excited to see how LLMs will continue to shape the future, changing how we learn, work, and interact with the world around us.
Unleashing the Power of Data with LlamaIndex
Large language models (LLMs) like ChatGPT have captivated the world with their remarkable ability to comprehend and generate human-quality text. However, effectively leveraging their capabilities for specialized applications requires bridging the gap between vast datasets and targeted user interactions. This is where LlamaIndex emerges as a game-changer, offering a robust and intuitive framework to connect your data to LLMs seamlessly.
This blog post dives into the recently released Llama-Index-Python samples on Azure, a treasure chest of resources designed to streamline your LLM application development journey. We’ll explore what LlamaIndex is, why it’s such a powerful tool, and how you can leverage the Azure samples to quickly build and deploy LLM-powered applications.
What is LlamaIndex and Why Should You Care?
Imagine having a vast library of information – research papers, technical documentation, customer reviews, internal knowledge bases – and wanting to instantly query that information in a natural, conversational way. LlamaIndex makes this possible. It acts as an intelligent intermediary between your data and the LLM, empowering you to:
Connect & Centralize: Aggregate data from a myriad of sources, including text documents, PDFs, APIs, databases, wikis, GitHub repositories, Notion workspaces, and more. No more siloed information!
Structure & Organize: Transform raw data into a structured, LLM-understandable format using various index structures like vector stores, tree indexes, and keyword tables. Think of it as building a smart, searchable index for your data.
Query & Explore: Interact with your data using natural language queries. Ask questions, seek insights, request summaries, and even generate new content, all powered by the LLM’s advanced understanding of your data.
Let’s take a closer look at how LlamaIndex processes data, often in conjunction with tools like LangChain, to create a powerful system for interacting with your information:
Data Ingestion: Start by ingesting your data, which could be in various formats like PDFs in this example.
Vector Indexing: LlamaIndex uses vector indexing, a technique to represent text data as numerical vectors. This makes it easier for the LLM to understand and compare different pieces of information.
LangChain Integration: This step highlights how LangChain can be integrated to call an embedding model. Embedding models are crucial for converting text into those numerical representations used in vector indexing.
Index Storage: Finally, the constructed vector index, representing your data in a searchable format, is stored on disk (often as a JSON file), ready to be queried.
Azure Samples: Your Fast Track to LLM Application Development
The Azure Samples repository for Llama-Index-Python provides a collection of ready-to-deploy examples that illustrate the versatility of LlamaIndex on Azure. Here’s why you should be excited:
Deployment Made Easy:
Pre-configured Environments: Forget about spending hours on setup. The samples utilize Azure services like Azure Functions, Azure Cognitive Search, and Azure Blob Storage, offering pre-configured environments to jumpstart your development.
Infrastructure as Code (IaC): Embrace modern development practices by leveraging tools like Bicep or Terraform to define and manage your infrastructure. This ensures reproducibility, scalability, and easier collaboration
A Universe of Use Cases: The repository caters to a diverse range of LLM applications, enabling you to:
Craft Intelligent Chatbots: Develop sophisticated chatbots or Q&A systems that provide accurate and context-aware responses by tapping into your knowledge base.
Accelerate Code Development: Leverage code snippets from your repositories to generate new code, reducing boilerplate and speeding up development workflows.
Augment Your Data’s Potential: Enrich existing data, synthesize new data for testing purposes, or translate information across languages with the help of LLMs.
Create Personalized Learning: Build interactive and engaging learning experiences by connecting educational content to LLMs, offering personalized explanations, and dynamically generating quizzes or exercises.
Uncover Market Insights: Analyze market trends, customer sentiment, and competitive landscapes by processing vast amounts of market data, news articles, and social media conversations.
Streamline Legal Operations: Automate contract analysis by extracting key clauses, identifying risks, and generating summaries to simplify legal document review.
A Developer-Friendly Experience:
Crystal-Clear Documentation: Each sample comes with comprehensive documentation that walks you through the code, deployment process, and potential use cases.
Modular and Adaptable Code: The samples are structured modularly, making it easy to customize and extend the code to fit your specific requirements.
Thriving Community Support: Tap into the knowledge and experience of a vibrant open-source community with dedicated support channels, forums, and resources.
Ready to embark on your LLM adventure? Here’s a simple roadmap:
Explore the Repository: Visit the Llama-Index-Python repository on GitHub.
Pick Your Starting Point: Choose a sample that aligns with your interests or the type of application you’re eager to build.
Deploy, Experiment, and Innovate: Follow the provided instructions to deploy the sample on Azure. Experiment, tweak, and explore the endless possibilities!
Beyond the Basics: Here’s how to take your LLM applications even further:
Integrate with Azure OpenAI Service: Connect your LlamaIndex applications to Azure OpenAI Service to leverage the power of state-of-the-art LLMs like GPT-4, unlocking enhanced capabilities and performance.
Connect to Your Unique Data: Extend the existing data connectors or build your own to integrate LlamaIndex with your specific data sources, no matter how specialized they might be.
Experiment with Different LLMs: Explore the flexibility of LlamaIndex by integrating and experimenting with different LLMs beyond the default models provided in the samples.
Fine-tune for Precision: Fine-tune LLMs on your specific datasets to improve their understanding of your domain-specific language and achieve even more accurate and relevant results.
Ready to Dive Deeper?
Microsoft Azure Fundamentals: Describe cloud concepts – Training | Microsoft Learn
What is Azure OpenAI Service? – Azure AI services | Microsoft Learn
Explore and configure the Azure Machine Learning workspace – Training | Microsoft Learn
Beginner’s Guide to Azure AI Studio: Developing and Deploying AI Applications
Deconstructing Contoso Chat: Prompt Engineering to LLM Ops
Join the Community:
Welcome to the Azure Community
Go Deeper with LlamaIndex:
The Future is LLM-Powered, and It’s Closer Than You Think:
The convergence of LlamaIndex and Azure empowers developers of all skill levels to unlock the true potential of LLMs and build innovative solutions across a multitude of domains. Start exploring, experimenting, and let the power of LLMs transform your approach to building intelligent, data-driven applications.
Found this useful? Share it with others and follow me to get updates on:
LinkedIn (linkedin.com/in/shivam2003)
Microsoft Tech Community – Latest Blogs –Read More
Enhancing Security and Scalability with Reusable Workflows in GitHub and Pipeline Templates in Azure
Introduction
In the world of modern software development, efficiency, security, and scalability are paramount. Leveraging template workflows and reusable workflows in CI/CD pipelines can significantly enhance these aspects. This blog explores the security and scalability benefits of using template and reusable workflows in both GitHub Actions and Azure DevOps.
Understanding Template and Reusable Workflows
GitHub Actions
GitHub Actions allows you to automate your workflows for various tasks, including CI/CD. One powerful feature is the ability to reuse workflows across multiple repositories. This not only ensures consistency but also streamlines the setup process. For more details, you can visit the official GitHub documentation on reusable workflows.
Azure DevOps
Similarly, Azure DevOps offers the ability to use templates in your pipelines. This feature allows you to define common logic once and reuse it across multiple pipelines, promoting DRY (Don’t Repeat Yourself) principles. More information can be found in the Azure DevOps documentation on templates.
Security Benefits
Consistent Security Policies
By using templates and reusable workflows, you can enforce consistent security policies across all your projects. This ensures that critical security steps, such as code scanning, dependency checks, and secret management, are always included in your workflows.
Reduced Human Error
Manual setup of workflows across multiple repositories increases the risk of human error. Templates and reusable workflows minimize this risk by providing a single source of truth, reducing the chances of missing or misconfiguring security steps.
Scalability Benefits
Streamlined Onboarding
Templates and reusable workflows make it easier to onboard new projects and teams. By providing a standardized set of workflows, new repositories can quickly adopt best practices without reinventing the wheel.
Easier Maintenance
Maintaining a single set of templates or reusable workflows is significantly easier than managing individual workflows for each repository. The templates can be stored in a separate repository dedicated to templates, allowing consistent build, test, scan, and deployment steps. Updates to the workflow can be made in one place and propagated across all projects, ensuring that improvements and fixes are consistently applied.
Practical Examples
GitHub Actions
Consider a scenario where you need to perform a security scan on your codebase. By defining a reusable workflow that includes this step, you can ensure that all your repositories benefit from the same security checks.
# .github/workflows/reusable-security-scan.yml
name: Reusable Security Scan
on: [push]
jobs:
security_scan:
runs-on: ubuntu-latest
steps:
– name: Checkout code
uses: actions/checkout@v2
– name: Run security scan
run: |
npm install
npm audit
This workflow can then be reused in multiple repositories:
# .github/workflows/main.yml
name: Main Workflow
on: [push]
jobs:
call-security-scan:
uses: ./.github/workflows/reusable-security-scan.yml
Azure DevOps
In Azure DevOps, you can create a template for running unit tests and use it across multiple pipelines:
# templates/unit-tests.yml
parameters:
– name: testFiles
type: string
default: ‘**/*.test.js’
steps:
– task: Npm@1
inputs:
command: ‘install’
– script: npm test $(testFiles)
displayName: ‘Run unit tests’
This template can be included in various pipelines:
# azure-pipelines.yml
trigger:
– main
jobs:
– template: templates/unit-tests.yml
parameters:
testFiles: ‘src/**/*.test.js’
Conclusion
Using reusable workflows in GitHub Actions and template pipelines in Azure DevOps not only enhances security and scalability but also simplifies the management of CI/CD pipelines. By centralizing your workflow logic, you can ensure consistency, reduce errors, and streamline the onboarding process for new projects and teams.
For more information, check out the official documentation for GitHub Actions reusable workflows and Azure DevOps pipeline templates.
Happy coding!
Microsoft Tech Community – Latest Blogs –Read More
How can I save data from for loop?
Hello.
I have 3 arrays of scalar numbers. I want to perform some simple calculations on all numbers of these 3 arrays one by one (permutation of all array numbers), then multiply every result (y) with rand(4,20) and finally save every result as Excel data. I wrote the code below but I have a problem: "I see in the output only the last multiplication result of y" and therefore just the last w.
clc
clear all
i=0;
j=1;
n=1;
A = [10 20 30 40 50 10];
B = [1 1.2 1.4 1.6 1.8 1];
C = [5 25 45 65 85 5];
z= rand(4,20);
for a = 1:125
i=i+1;
if i==6
j=j+1;
i=1;
end
if j==6
n=n+1;
j=1;
end
if(n==6)
n=1;
break ;
end
A_2 = A(i);
B_2 = B(j);
C_2 = C(n);
y = A_2 * B_2 * C_2 – A_2 * C_2;
w = y * z;
endHello.
I have 3 arrays of scalar numbers. I want to perform some simple calculations on all numbers of these 3 arrays one by one (permutation of all array numbers), then multiply every result (y) with rand(4,20) and finally save every result as Excel data. I wrote the code below but I have a problem: "I see in the output only the last multiplication result of y" and therefore just the last w.
clc
clear all
i=0;
j=1;
n=1;
A = [10 20 30 40 50 10];
B = [1 1.2 1.4 1.6 1.8 1];
C = [5 25 45 65 85 5];
z= rand(4,20);
for a = 1:125
i=i+1;
if i==6
j=j+1;
i=1;
end
if j==6
n=n+1;
j=1;
end
if(n==6)
n=1;
break ;
end
A_2 = A(i);
B_2 = B(j);
C_2 = C(n);
y = A_2 * B_2 * C_2 – A_2 * C_2;
w = y * z;
end Hello.
I have 3 arrays of scalar numbers. I want to perform some simple calculations on all numbers of these 3 arrays one by one (permutation of all array numbers), then multiply every result (y) with rand(4,20) and finally save every result as Excel data. I wrote the code below but I have a problem: "I see in the output only the last multiplication result of y" and therefore just the last w.
clc
clear all
i=0;
j=1;
n=1;
A = [10 20 30 40 50 10];
B = [1 1.2 1.4 1.6 1.8 1];
C = [5 25 45 65 85 5];
z= rand(4,20);
for a = 1:125
i=i+1;
if i==6
j=j+1;
i=1;
end
if j==6
n=n+1;
j=1;
end
if(n==6)
n=1;
break ;
end
A_2 = A(i);
B_2 = B(j);
C_2 = C(n);
y = A_2 * B_2 * C_2 – A_2 * C_2;
w = y * z;
end for loop, excel MATLAB Answers — New Questions
Reading and re-writing a Dicom file (header) with private tags
I have an RT ION dicom plan file which has a big nested header file with a lot of private tags.
I have opened the header file and processed everything I need to process and want to write it back into an updated DICOM RT plan file.
The plan file writes back. However, all the private or unknown tags that are not in the DICOM dictionary (dicom-dict.txt file) get read and re-written with a Private_ prefix e.g. Private_0300_0309 and the values under these tags is usually garbled.
Things I’ve tried so far:
I just let the private tags stay in as they are but then my re-written file has trouble being read again by the planning system I want to re-send, so that’s a no go.
I have all the information about the type of these DICOM tags (VR, VM, etc) and I have added a few of them to the dicom-dict.txt file but it makes no difference, and they still get read as private_ tags and get re-written as private tags.
I have tried using the ‘WritePrivate’ modifier with dicomwrite but it doesn’t help because the tags are probably still showing up as private tags.
(dicomwrite([], FNnew, DICinfo, ‘CreateMode’, ‘copy’ , ‘WritePrivate’, true)
Is there any way to manually change the name of the tag/variable and assign the correct values or something? I can read the garbled values under these private tags using the char function on the transpose of the value. eg. char(variable’) but there’s not much I can do with that information unless I can re-write the name of the variable/tag and remove the private_ prefix of the tag name.
Or is there something I’m missing with the dicom-dict.txt file? Do I need to force MATLAB to refresh or read it, or check if my additional tags in the dicom-dict.txt file have been read and accepted properly?
I’ve already spent a lot of time looking for a solution and I feel like a solution is right around the corner but I can’t figure it out.
Any help would be greatly appreciated.I have an RT ION dicom plan file which has a big nested header file with a lot of private tags.
I have opened the header file and processed everything I need to process and want to write it back into an updated DICOM RT plan file.
The plan file writes back. However, all the private or unknown tags that are not in the DICOM dictionary (dicom-dict.txt file) get read and re-written with a Private_ prefix e.g. Private_0300_0309 and the values under these tags is usually garbled.
Things I’ve tried so far:
I just let the private tags stay in as they are but then my re-written file has trouble being read again by the planning system I want to re-send, so that’s a no go.
I have all the information about the type of these DICOM tags (VR, VM, etc) and I have added a few of them to the dicom-dict.txt file but it makes no difference, and they still get read as private_ tags and get re-written as private tags.
I have tried using the ‘WritePrivate’ modifier with dicomwrite but it doesn’t help because the tags are probably still showing up as private tags.
(dicomwrite([], FNnew, DICinfo, ‘CreateMode’, ‘copy’ , ‘WritePrivate’, true)
Is there any way to manually change the name of the tag/variable and assign the correct values or something? I can read the garbled values under these private tags using the char function on the transpose of the value. eg. char(variable’) but there’s not much I can do with that information unless I can re-write the name of the variable/tag and remove the private_ prefix of the tag name.
Or is there something I’m missing with the dicom-dict.txt file? Do I need to force MATLAB to refresh or read it, or check if my additional tags in the dicom-dict.txt file have been read and accepted properly?
I’ve already spent a lot of time looking for a solution and I feel like a solution is right around the corner but I can’t figure it out.
Any help would be greatly appreciated. I have an RT ION dicom plan file which has a big nested header file with a lot of private tags.
I have opened the header file and processed everything I need to process and want to write it back into an updated DICOM RT plan file.
The plan file writes back. However, all the private or unknown tags that are not in the DICOM dictionary (dicom-dict.txt file) get read and re-written with a Private_ prefix e.g. Private_0300_0309 and the values under these tags is usually garbled.
Things I’ve tried so far:
I just let the private tags stay in as they are but then my re-written file has trouble being read again by the planning system I want to re-send, so that’s a no go.
I have all the information about the type of these DICOM tags (VR, VM, etc) and I have added a few of them to the dicom-dict.txt file but it makes no difference, and they still get read as private_ tags and get re-written as private tags.
I have tried using the ‘WritePrivate’ modifier with dicomwrite but it doesn’t help because the tags are probably still showing up as private tags.
(dicomwrite([], FNnew, DICinfo, ‘CreateMode’, ‘copy’ , ‘WritePrivate’, true)
Is there any way to manually change the name of the tag/variable and assign the correct values or something? I can read the garbled values under these private tags using the char function on the transpose of the value. eg. char(variable’) but there’s not much I can do with that information unless I can re-write the name of the variable/tag and remove the private_ prefix of the tag name.
Or is there something I’m missing with the dicom-dict.txt file? Do I need to force MATLAB to refresh or read it, or check if my additional tags in the dicom-dict.txt file have been read and accepted properly?
I’ve already spent a lot of time looking for a solution and I feel like a solution is right around the corner but I can’t figure it out.
Any help would be greatly appreciated. dicom, dicomwrite, private tags, private MATLAB Answers — New Questions
I have Backpropagation doubt
I’m trying to do neural network with 2 hidden layers and one neuron in the output layer without any toolboxes and just with matrix and vectors multiplications. To do this, I created fictional simple data as below to help me in this task:
%Data
x = 1:1000;
y1 = sind(x);
y2 = sind(x+30);
y3 = cosd(x);
y4 = cosd(x+30);
y5 = cosd(x+45);
% y6 will be the desired output data taht I would like my neural network
% try to predict
y6 = (y1 + y2 + y3 + y4 + y5);
Then, I coded it in what I thought to be the right way, but my neural network can’t reach a good result, as below:
My doubt is whether the result isn’t good because my implementation isn’t right, or because I need to add more mechanisms to my neural network (like momentum, regularization, etc.)?
I will post my code below; sorry about the names of some variables, but originally I wrote this code in Portuguese. I will comment the code to help understand it.
%Nueral network achictecture
n_h1 = 10;
n_h2 = 11;
n_out = 1;
%Adjustable parameters
w1 = rand(5,n_h1);
b1 = ones(1,n_h1)*rand(1,1);
w2 = rand(n_h1,n_h2);
b2 = ones(1,n_h2)*rand(1,1);
w_out = rand(n_h2,n_out);
b_out = ones(1,n_out)*rand(1,1);
sig_a = 1;
learning_rate = 0.001;
limiar = 0.002;
%Helpful variables
max_epocas = 1000;
conj_entrada = [y1;y2;y3;y4;y5];
erros_epoca = [];
%Backpropagation
for epoch = 1:max_epocas
for i = 1:size(conj_entrada,2)
if i ==1
soma = 0;
else
end
enter = conj_entrada(:,i);
h1_in = [w1;b1]’*[enter;1];
h1_out = sig(h1_in,sig_a,’False’);
h2_in = [w2;b2]’*[h1_out;1];
h2_out = sig(h2_in,sig_a,’False’);
saida_in = [w_out;b_out]’*[h2_out;1];
saida_out = saida_in;
erro = y6(i) – saida_out;
soma = soma + (erro^2);
%Here starts the part of the code where the gradients are being
%calculated. Note that, here, I tried to folllow the chain rule.
%let me try to help in the understanding. Saida in portuguese is
%like output in english so when you read ,for example,
%d_erro_d_saida_out you need to know that this is the derivative of
%the error in relation with the output of the output layer. In the
%same way, entrada means input and pesos means weights
%output layer
%chain rule
d_erro_d_saida_out = -1*erro;
d_saida_d_entrada_out = 1; %linear
grad_saida = erro*d_saida_d_entrada_out;
d_entrada_d_pesos_out = h2_out;
d_erro_d_pesos_out = d_erro_d_saida_out*d_saida_d_entrada_out*d_entrada_d_pesos_out;
% Update the wights and bias
w_out = w_out -learning_rate*d_erro_d_pesos_out;
b_out = b_out -learning_rate*d_erro_d_saida_out*d_saida_d_entrada_out;
%Second hidden layer (The neighbor layer of the output layer)
%chain rule
d_erro_d_saida_h2 = -1*w_out*grad_saida;
d_saida_d_entrada_h2 = sig(h2_in,sig_a,’True’);
grad_h2 = sum(grad_saida)*d_saida_d_entrada_h2;
d_entrada_d_pesos_h2 = h1_out;
d_erro_d_pesos_h2 = d_entrada_d_pesos_h2*grad_h2′;
% Update the wights and bias
w2 = w2 -1*learning_rate*d_erro_d_pesos_h2;
b2 = b2 -1*learning_rate*sum(d_erro_d_saida_h2.*d_saida_d_entrada_h2,1);
%First hidden layer (The neighbor layer of the seccond hidden layer)
%chain rule
d_erro_d_saida_h1 = -1*w2*grad_h2;
d_saida_d_entrada_h1 = sig(h1_in,sig_a,’True’);
grad_h1 = sum(grad_h2)*d_saida_d_entrada_h1; %então daqui, tem que sair um 3×1
d_entrada_d_pesos_h1 = enter;
d_erro_d_pesos_h1 = d_entrada_d_pesos_h1*grad_h1′; %a segunda variável tem que resultar em um 1×3
% Update the wights and bias
w1 = w1 -1*learning_rate*d_erro_d_pesos_h1;
b1 = b1 -1*learning_rate*sum(d_erro_d_saida_h1.*d_saida_d_entrada_h1,1);
end
erro_atual = (soma/(2*size(x,2)));
erros_epoca = [erros_epoca;erro_atual];
if erros_epoca(epoch) <limiar
break
else
end
end
%testing the output of neural network
vetor_teste = 1:1000;
resposta_teste = zeros(1,size(vetor_teste,2));
for i = 1:size(vetor_teste,2)
enter_teste = conj_entrada(:,i);
h1_in_teste = [w1;b1]’*[enter_teste;1];
h1_out_teste = sig(h1_in_teste,sig_a,’False’);
h2_in_teste = [w2;b2]’*[h1_out_teste;1];
h2_out_teste = sig(h2_in_teste,sig_a,’False’);
saida_in_teste = [w_out;b_out]’*[h2_out_teste;1];
saida_out_teste = saida_in_teste; % a função de saída é linear;
resposta_teste(i) = saida_out_teste;
end
plot(1:size(erros_epoca,1),erros_epoca);
% plot(x,y3,’b’,vetor_teste,resposta_teste,’r’);
The code of my sigmoid activation function is below:
function [vetor_saida] = sig(vetor_entrada, const1, derivative)
if strcmp(derivative, ‘False’) == 1
vetor_saida = 1 ./ (1 + exp(-const1 * vetor_entrada));
else
sig_value = sig(vetor_entrada, const1, ‘False’);
vetor_saida = const1 * sig_value .* (1 – sig_value);
end
endI’m trying to do neural network with 2 hidden layers and one neuron in the output layer without any toolboxes and just with matrix and vectors multiplications. To do this, I created fictional simple data as below to help me in this task:
%Data
x = 1:1000;
y1 = sind(x);
y2 = sind(x+30);
y3 = cosd(x);
y4 = cosd(x+30);
y5 = cosd(x+45);
% y6 will be the desired output data taht I would like my neural network
% try to predict
y6 = (y1 + y2 + y3 + y4 + y5);
Then, I coded it in what I thought to be the right way, but my neural network can’t reach a good result, as below:
My doubt is whether the result isn’t good because my implementation isn’t right, or because I need to add more mechanisms to my neural network (like momentum, regularization, etc.)?
I will post my code below; sorry about the names of some variables, but originally I wrote this code in Portuguese. I will comment the code to help understand it.
%Nueral network achictecture
n_h1 = 10;
n_h2 = 11;
n_out = 1;
%Adjustable parameters
w1 = rand(5,n_h1);
b1 = ones(1,n_h1)*rand(1,1);
w2 = rand(n_h1,n_h2);
b2 = ones(1,n_h2)*rand(1,1);
w_out = rand(n_h2,n_out);
b_out = ones(1,n_out)*rand(1,1);
sig_a = 1;
learning_rate = 0.001;
limiar = 0.002;
%Helpful variables
max_epocas = 1000;
conj_entrada = [y1;y2;y3;y4;y5];
erros_epoca = [];
%Backpropagation
for epoch = 1:max_epocas
for i = 1:size(conj_entrada,2)
if i ==1
soma = 0;
else
end
enter = conj_entrada(:,i);
h1_in = [w1;b1]’*[enter;1];
h1_out = sig(h1_in,sig_a,’False’);
h2_in = [w2;b2]’*[h1_out;1];
h2_out = sig(h2_in,sig_a,’False’);
saida_in = [w_out;b_out]’*[h2_out;1];
saida_out = saida_in;
erro = y6(i) – saida_out;
soma = soma + (erro^2);
%Here starts the part of the code where the gradients are being
%calculated. Note that, here, I tried to folllow the chain rule.
%let me try to help in the understanding. Saida in portuguese is
%like output in english so when you read ,for example,
%d_erro_d_saida_out you need to know that this is the derivative of
%the error in relation with the output of the output layer. In the
%same way, entrada means input and pesos means weights
%output layer
%chain rule
d_erro_d_saida_out = -1*erro;
d_saida_d_entrada_out = 1; %linear
grad_saida = erro*d_saida_d_entrada_out;
d_entrada_d_pesos_out = h2_out;
d_erro_d_pesos_out = d_erro_d_saida_out*d_saida_d_entrada_out*d_entrada_d_pesos_out;
% Update the wights and bias
w_out = w_out -learning_rate*d_erro_d_pesos_out;
b_out = b_out -learning_rate*d_erro_d_saida_out*d_saida_d_entrada_out;
%Second hidden layer (The neighbor layer of the output layer)
%chain rule
d_erro_d_saida_h2 = -1*w_out*grad_saida;
d_saida_d_entrada_h2 = sig(h2_in,sig_a,’True’);
grad_h2 = sum(grad_saida)*d_saida_d_entrada_h2;
d_entrada_d_pesos_h2 = h1_out;
d_erro_d_pesos_h2 = d_entrada_d_pesos_h2*grad_h2′;
% Update the wights and bias
w2 = w2 -1*learning_rate*d_erro_d_pesos_h2;
b2 = b2 -1*learning_rate*sum(d_erro_d_saida_h2.*d_saida_d_entrada_h2,1);
%First hidden layer (The neighbor layer of the seccond hidden layer)
%chain rule
d_erro_d_saida_h1 = -1*w2*grad_h2;
d_saida_d_entrada_h1 = sig(h1_in,sig_a,’True’);
grad_h1 = sum(grad_h2)*d_saida_d_entrada_h1; %então daqui, tem que sair um 3×1
d_entrada_d_pesos_h1 = enter;
d_erro_d_pesos_h1 = d_entrada_d_pesos_h1*grad_h1′; %a segunda variável tem que resultar em um 1×3
% Update the wights and bias
w1 = w1 -1*learning_rate*d_erro_d_pesos_h1;
b1 = b1 -1*learning_rate*sum(d_erro_d_saida_h1.*d_saida_d_entrada_h1,1);
end
erro_atual = (soma/(2*size(x,2)));
erros_epoca = [erros_epoca;erro_atual];
if erros_epoca(epoch) <limiar
break
else
end
end
%testing the output of neural network
vetor_teste = 1:1000;
resposta_teste = zeros(1,size(vetor_teste,2));
for i = 1:size(vetor_teste,2)
enter_teste = conj_entrada(:,i);
h1_in_teste = [w1;b1]’*[enter_teste;1];
h1_out_teste = sig(h1_in_teste,sig_a,’False’);
h2_in_teste = [w2;b2]’*[h1_out_teste;1];
h2_out_teste = sig(h2_in_teste,sig_a,’False’);
saida_in_teste = [w_out;b_out]’*[h2_out_teste;1];
saida_out_teste = saida_in_teste; % a função de saída é linear;
resposta_teste(i) = saida_out_teste;
end
plot(1:size(erros_epoca,1),erros_epoca);
% plot(x,y3,’b’,vetor_teste,resposta_teste,’r’);
The code of my sigmoid activation function is below:
function [vetor_saida] = sig(vetor_entrada, const1, derivative)
% SIG Element-wise logistic sigmoid, or its derivative.
%   vetor_entrada : input scalar/vector/matrix
%   const1        : slope parameter a of 1./(1+exp(-a*z))
%   derivative    : 'False' -> sigmoid value; anything else -> derivative
% Fixes vs. original: straight ASCII quotes (the typographic quotes in the
% pasted code are parse errors) and '-' instead of the en-dash in (1 - s).
if strcmp(derivative, 'False')   % strcmp already returns logical; '== 1' dropped
vetor_saida = 1 ./ (1 + exp(-const1 * vetor_entrada));
else
% d/dz sigmoid(a*z) = a * s .* (1 - s), reusing the forward value
sig_value = sig(vetor_entrada, const1, 'False');
vetor_saida = const1 * sig_value .* (1 - sig_value);
end
end

I'm trying to do a neural network with 2 hidden layers and one neuron in the output layer, without any toolboxes — just matrix and vector multiplications. To do this, I created simple fictional data, as below, to help me in this task:
%Data: synthetic 5-feature input built from phase-shifted sines/cosines
%(sind/cosd take the argument in degrees)
x = 1:1000;
y1 = sind(x);
y2 = sind(x+30);
y3 = cosd(x);
y4 = cosd(x+30);
y5 = cosd(x+45);
% y6 will be the desired output data that I would like my neural network
% to try to predict (a linear combination of the five input signals)
y6 = (y1 + y2 + y3 + y4 + y5);
Then, I coded it in what I thought to be the right way, but my neural network can't reach a good result, as shown below:
My doubt is whether the result isn't good because my implementation isn't right, or because I need to add more mechanisms to my neural network (like momentum, regularization, etc.)?
I will post my code below. Sorry about the names of some variables — I originally wrote this code in Portuguese. I will comment the code to help you understand it.
%Neural network architecture: 5 inputs -> n_h1 -> n_h2 -> 1 linear output
n_h1 = 10;
n_h2 = 11;
n_out = 1;
%Adjustable parameters
% NOTE(review): rand() gives weights in [0,1); zero-mean initialization
% (e.g. rand(...)-0.5) usually trains better — TODO confirm.
w1 = rand(5,n_h1);
b1 = ones(1,n_h1)*rand(1,1); % every layer-1 bias starts at the same random value
w2 = rand(n_h1,n_h2);
b2 = ones(1,n_h2)*rand(1,1);
w_out = rand(n_h2,n_out);
b_out = ones(1,n_out)*rand(1,1);
sig_a = 1; % sigmoid slope parameter passed to sig()
learning_rate = 0.001;
limiar = 0.002; % early-stop threshold on the per-epoch error
%Helpful variables
max_epocas = 1000;
conj_entrada = [y1;y2;y3;y4;y5]; % 5 x 1000 input set, one column per sample
erros_epoca = []; % per-epoch error history, grown inside the training loop
%Backpropagation: online (per-sample) gradient descent.
% Fixes vs. original:
%  - straight ASCII quotes (the typographic quotes were parse errors);
%  - hidden-layer deltas backpropagate through the NEXT layer's weights,
%    (w*delta).*sigma', instead of sum(delta)*sigma';
%  - bias updates use the per-neuron delta vector (the original
%    sum(...,1) of a column vector collapsed it to one scalar shared by
%    all biases of the layer);
%  - all weight updates are applied AFTER every delta is computed, so
%    layer-2 deltas use the pre-update w_out.
for epoch = 1:max_epocas
soma = 0; % accumulated squared error over this epoch
for i = 1:size(conj_entrada,2)
% ---- Forward pass ----
enter = conj_entrada(:,i);            % 5 x 1 input sample
h1_in = [w1;b1]'*[enter;1];           % n_h1 x 1
h1_out = sig(h1_in,sig_a,'False');
h2_in = [w2;b2]'*[h1_out;1];          % n_h2 x 1
h2_out = sig(h2_in,sig_a,'False');
saida_in = [w_out;b_out]'*[h2_out;1]; % n_out x 1
saida_out = saida_in;                 % linear output activation
erro = y6(i) - saida_out;             % scalar error for this sample
soma = soma + erro^2;
% ---- Backward pass (chain rule) ----
% With E = erro^2/2 and a linear output, the output delta is erro.
grad_saida = erro;
% Hidden deltas: propagate through the next layer's weights, then
% through the sigmoid derivative at the layer's pre-activation.
grad_h2 = (w_out*grad_saida).*sig(h2_in,sig_a,'True'); % n_h2 x 1
grad_h1 = (w2*grad_h2).*sig(h1_in,sig_a,'True');       % n_h1 x 1
% ---- Gradient-descent updates (dE/dw = -input*delta') ----
w_out = w_out + learning_rate*h2_out*grad_saida';
b_out = b_out + learning_rate*grad_saida';
w2 = w2 + learning_rate*h1_out*grad_h2';
b2 = b2 + learning_rate*grad_h2';
w1 = w1 + learning_rate*enter*grad_h1';
b1 = b1 + learning_rate*grad_h1';
end
erro_atual = soma/(2*size(x,2)); % half mean squared error for this epoch
erros_epoca = [erros_epoca;erro_atual];
if erros_epoca(epoch) < limiar   % early stop once the error is small enough
break
end
end
%Testing the output of the neural network
% NOTE(review): this evaluates on the same inputs used for training
% (conj_entrada), so it measures fit, not generalization — confirm intent.
vetor_teste = 1:1000;
resposta_teste = zeros(1,size(vetor_teste,2));
for i = 1:size(vetor_teste,2)
% Forward pass only, with the trained weights (straight quotes: the
% typographic quotes in the original are not valid MATLAB syntax).
enter_teste = conj_entrada(:,i);
h1_in_teste = [w1;b1]'*[enter_teste;1];
h1_out_teste = sig(h1_in_teste,sig_a,'False');
h2_in_teste = [w2;b2]'*[h1_out_teste;1];
h2_out_teste = sig(h2_in_teste,sig_a,'False');
saida_in_teste = [w_out;b_out]'*[h2_out_teste;1];
saida_out_teste = saida_in_teste; % the output activation is linear
resposta_teste(i) = saida_out_teste;
end
plot(1:size(erros_epoca,1),erros_epoca);
% plot(x,y3,'b',vetor_teste,resposta_teste,'r');
The code of my sigmoid activation function is below:
function [vetor_saida] = sig(vetor_entrada, const1, derivative)
% SIG Element-wise logistic sigmoid, or its derivative.
%   vetor_entrada : input scalar/vector/matrix
%   const1        : slope parameter a of 1./(1+exp(-a*z))
%   derivative    : 'False' -> sigmoid value; anything else -> derivative
% Fixes vs. original: straight ASCII quotes (the typographic quotes in the
% pasted code are parse errors) and '-' instead of the en-dash in (1 - s).
if strcmp(derivative, 'False')   % strcmp already returns logical; '== 1' dropped
vetor_saida = 1 ./ (1 + exp(-const1 * vetor_entrada));
else
% d/dz sigmoid(a*z) = a * s .* (1 - s), reusing the forward value
sig_value = sig(vetor_entrada, const1, 'False');
vetor_saida = const1 * sig_value .* (1 - sig_value);
end
end

neural network, backpropagation MATLAB Answers — New Questions