working on it ...

Filters

Explore Public Snippets

Sort by

Found 7,835 snippets matching: jobs

    public by PerunduraiProducts  135343  0  3  0

    Perundurai jobs

    Perundurai jobs : Perundurai jobs
    Create links from Perundurai Products to Perundurai jobs
    
    

    public by cghersi  2751  9  6  3

    Retrieve the SQL commands that are currently executing in MS SQL Server

    Thanks to avinash.kote http://www.sqlservercentral.com/scripts/118732/
    SELECT  des.session_id, des.status, des.login_name, des.[HOST_NAME],
            der.blocking_session_id, DB_NAME(der.database_id) as database_name,
            der.command, des.cpu_time, des.reads, des.writes, dec.last_write,
            des.[program_name], emg.requested_memory_kb, emg.granted_memory_kb,
            emg.used_memory_kb, der.wait_type, der.wait_time, der.last_wait_type,
            der.wait_resource,
            CASE des.transaction_isolation_level 
              WHEN 0 THEN 'Unspecified' 
              WHEN 1 THEN 'ReadUncommitted' 
              WHEN 2 THEN 'ReadCommitted'
              WHEN 3 THEN 'Repeatable' 
              WHEN 4 THEN 'Serializable' 
              WHEN 5 THEN 'Snapshot' END AS transaction_isolation_level,
            OBJECT_NAME(dest.objectid, der.database_id) as OBJECT_NAME, 
            dest.text as full_query_text,
            -- statement_start/end_offset are BYTE offsets into the Unicode text:
            -- divide by 2 and add 1 (the original omitted the +1s, clipping the
            -- first character of the statement and truncating the last).
            SUBSTRING(dest.text, der.statement_start_offset / 2 + 1,
              (CASE WHEN der.statement_end_offset = -1 THEN DATALENGTH(dest.text) 
                    ELSE der.statement_end_offset END - 
              der.statement_start_offset) / 2 + 1
            ) AS [executing_statement], deqp.query_plan
    FROM  sys.dm_exec_sessions des
    LEFT JOIN sys.dm_exec_requests der on des.session_id = der.session_id
    LEFT JOIN sys.dm_exec_connections dec on des.session_id = dec.session_id
    LEFT JOIN sys.dm_exec_query_memory_grants emg on des.session_id = emg.session_id
    -- OUTER APPLY (not CROSS APPLY): CROSS APPLY removes every session whose
    -- der.sql_handle is NULL (i.e. sessions with no active request), which
    -- silently defeated the LEFT JOINs above.
    OUTER APPLY sys.dm_exec_sql_text(der.sql_handle) dest
    OUTER APPLY sys.dm_exec_query_plan(der.plan_handle) deqp
    WHERE des.session_id <> @@SPID
    ORDER BY  des.session_id

    public by MarcoDeMagistris  2463  0  6  0

    Prints Jenkins Jobs

    Prints Jenkins Jobs
    package jenkins;
    
    import java.util.List;
    
    import org.dom4j.Document;
    import org.dom4j.DocumentException;
    import org.dom4j.Element;
    import org.dom4j.io.SAXReader;
    
    public class JenkinsJob {

    	/** Base URL of the Jenkins instance, e.g. http://localhost:8080/jenkins */
    	private String m_sjenkins_url;

    	public JenkinsJob(String _sjenkins_url) {
    		m_sjenkins_url = _sjenkins_url;
    	}

    	/**
    	 * Fetches &lt;jenkins-url&gt;/api/xml and prints the name of each top-level
    	 * job element to standard output.
    	 */
    	public void PrintJob() throws DocumentException {
    		Document apiDocument = new SAXReader().read(m_sjenkins_url + "/api/xml");
    		List<Element> jobElements = (List<Element>) apiDocument.getRootElement().elements("job");
    		for (Element jobElement : jobElements) {
    			System.out.println(jobElement.elementText("name"));
    		}
    	}
    }
    

    public by msdn  1524  0  6  0

    SQLAgentJobsCheck: Checks SQL Agent Jobs

    Checks SQL Agent Jobs
    // Server names for each environment, read from app settings.
    private static string gsDEV = ConfigurationManager.AppSettings["DEV"];
    private static string gsQA = ConfigurationManager.AppSettings["QA"];
    private static string gsIT = ConfigurationManager.AppSettings["IT"];
    private static string gsPROD = ConfigurationManager.AppSettings["PROD"];
    // Names of disabled ("stopped") SQL Agent jobs found per environment,
    // populated by SQLAgentJobsCheck().
    private static ArrayList galDEVStoppedSqlJobs = new ArrayList();
    private static ArrayList galQAStoppedSqlJobs = new ArrayList();
    private static ArrayList galITStoppedSqlJobs = new ArrayList();
    private static ArrayList galPRODStoppedSqlJobs = new ArrayList();
    // Per-environment counts filled in by SQLAgentJobsCheck():
    //   *AllJobsCount      - total maintenance jobs found (category_id 3, minus exclusions)
    //   *EnabledJobsCount  - number of jobs the re-enable UPDATE affected
    //   *JobsCount         - jobs that were already enabled when scanned
    private static int giDEVAllJobsCount;
    private static int giDEVEnabledJobsCount;
    private static int giDEVJobsCount;
    private static int giQAAllJobsCount;
    private static int giQAEnabledJobsCount;
    private static int giQAJobsCount;
    private static int giITAllJobsCount;
    private static int giITEnabledJobsCount;
    private static int giITJobsCount;
    private static int giPRODAllJobsCount;
    private static int giPRODEnabledJobsCount;
    private static int giPRODJobsCount;
    // Exceptions collected during checks; gbProblemFound flags that at least one occurred.
    private static ArrayList galLoggedException = new ArrayList();
    private static bool gbProblemFound = false;
    
    /// <summary>
    /// Checks SQL Agent Jobs
    /// </summary>
    /// <summary>
    /// Checks the SQL Agent "Database Maintenance" jobs (category_id = 3) on each
    /// configured environment (DEV/QA/IT/PROD): counts all matching jobs, records
    /// the names of disabled jobs, re-enables the disabled ones, and stores the
    /// resulting counts in the per-environment class-level fields.  Any exception
    /// is logged to galLoggedException and sets gbProblemFound, then processing
    /// continues with the next environment.
    /// </summary>
    private static void SQLAgentJobsCheck()
    {
        // NOTE(review): the excluded-name list from config is concatenated straight
        // into the SQL text.  Config is trusted here, but a parameterized query
        // would be safer against a malformed setting.
        string strExcludedJobs = ConfigurationManager.AppSettings["ExcludedAgentJobs"];

        //Fetch count of all Database Maintenance jobs (jobs with category id 3) except the jobs in the excluded list
        string strAllSQLAgentJobs = "select count(*) as count from sysjobs where category_id = 3 and name not in ('" + strExcludedJobs + "')";
        //Fetch all Database Maintenance jobs except the jobs in the excluded list
        string strSQLAgentJobs = "select name, enabled from sysjobs where category_id = 3 and name not in ('" + strExcludedJobs + "')";
        //Enable all disabled Database Maintenance jobs except the jobs in the excluded list
        string strEnableSQLAgentJobs = "update sysjobs set enabled = 1 where enabled = 0 and category_id = 3 and name not in ('" + strExcludedJobs + "')";

        ArrayList alAgentJobs = new ArrayList();
        alAgentJobs.Add(gsDEV);
        alAgentJobs.Add(gsQA);
        alAgentJobs.Add(gsIT);
        alAgentJobs.Add(gsPROD);

        // foreach replaces the original manual enumerator + finally/Dispose dance
        // (an ArrayList enumerator is not IDisposable, so nothing is lost).
        foreach (object objServer in alAgentJobs)
        {
            string strForConversion = Conversions.ToString(objServer);

            // Route stopped-job names into the matching per-environment list.
            ArrayList alStoppedAgentJobs;
            if (strForConversion == gsDEV)
            {
                alStoppedAgentJobs = galDEVStoppedSqlJobs;
            }
            else if (strForConversion == gsQA)
            {
                alStoppedAgentJobs = galQAStoppedSqlJobs;
            }
            else if (strForConversion == gsIT)
            {
                alStoppedAgentJobs = galITStoppedSqlJobs;
            }
            else
            {
                alStoppedAgentJobs = galPRODStoppedSqlJobs;
            }

            // Skip environments that are not configured (guard clause replaces the
            // original nested if).
            if (string.IsNullOrEmpty(strForConversion))
            {
                continue;
            }

            try
            {
                string strConnString = "Data Source=" + strForConversion + ";Initial Catalog=msdb;Integrated Security=TRUE";

                // Total maintenance-job count.  Readers are now disposed via using
                // blocks (the original leaked both SqlDataReaders and their
                // underlying connections).
                int iAllJobs = 0;
                using (SqlDataReader sdrAgentJobs = GetSqlDataReader(strAllSQLAgentJobs, strConnString))
                {
                    while (sdrAgentJobs.Read())
                    {
                        iAllJobs = sdrAgentJobs.GetInt32(0);
                    }
                }

                // Count enabled jobs; collect the names of the disabled ones.
                int iJobs = 0;
                using (SqlDataReader sdrAgentJobs = GetSqlDataReader(strSQLAgentJobs, strConnString))
                {
                    while (sdrAgentJobs.Read())
                    {
                        if (sdrAgentJobs.GetByte(1) != 1)   // sysjobs.enabled is a tinyint
                        {
                            alStoppedAgentJobs.Add(sdrAgentJobs.GetString(0));
                        }
                        else
                        {
                            iJobs++;
                        }
                    }
                }

                // Re-enable the disabled jobs; the return value is the number of
                // rows the UPDATE affected.
                int iEnabledJobs = ExecuteNonQuerySQL(strEnableSQLAgentJobs, strConnString);

                if (strForConversion == gsDEV)
                {
                    giDEVAllJobsCount = iAllJobs;
                    giDEVEnabledJobsCount = iEnabledJobs;
                    giDEVJobsCount = iJobs;
                }
                else if (strForConversion == gsQA)
                {
                    giQAAllJobsCount = iAllJobs;
                    giQAEnabledJobsCount = iEnabledJobs;
                    giQAJobsCount = iJobs;
                }
                else if (strForConversion == gsIT)
                {
                    giITAllJobsCount = iAllJobs;
                    giITEnabledJobsCount = iEnabledJobs;
                    giITJobsCount = iJobs;
                }
                else
                {
                    giPRODAllJobsCount = iAllJobs;
                    giPRODEnabledJobsCount = iEnabledJobs;
                    giPRODJobsCount = iJobs;
                }
            }
            catch (Exception ex)
            {
                // Record the failure and move on to the next environment.  The
                // ProjectData calls are VB-interop error bookkeeping preserved
                // from the original.
                ProjectData.SetProjectError(ex);
                galLoggedException.Add(ex);
                gbProblemFound = true;
                ProjectData.ClearProjectError();
            }
        }
    }

    public by msdn  1364  0  5  0

    GetJobStatus: Print 4 types of jobs 1.All active jobs of all users 2.All active jobs of the current user 3.All jobs of all users 4.All jobs of the current user

    Print 4 types of jobs 1.All active jobs of all users 2.All active jobs of the current user 3.All jobs of all users 4.All jobs of the current user
    using System;
    using System.Collections.ObjectModel;
    using System.Collections.Generic;
    using System.Globalization;
    using Microsoft.SharePoint;
    using Microsoft.SharePoint.Administration;
    using Microsoft.Office.TranslationServices;
    using Microsoft.Office.TranslationServices.Parsing;
    using System.Management.Automation.Runspaces;
    using System.Threading;
    using System.IO;
    using System.Data.SqlClient;
    using System.Data;
    using System.Management.Automation;
    using System.Text;
    
    // SharePoint service context passed to the TranslationJobStatus queries in
    // GetJobStatus(); presumably initialized elsewhere before use — confirm.
    static SPServiceContext sc;
    
    /// <summary>
    /// Print 4 types of jobs
    ///     1.All active jobs of all users
    ///     2.All active jobs of the current user
    ///     3.All jobs of all users
    ///     4.All jobs of the current user    
    /// </summary>
    static void GetJobStatus()
    {
        PrintJobList("=====Active jobs: All Users=====================",
            TranslationJobStatus.GetAllActiveJobs(sc, TranslationJobUserScope.AllUsers));

        PrintJobList("=====Active jobs: Current Users=====================",
            TranslationJobStatus.GetAllActiveJobs(sc, TranslationJobUserScope.CurrentUser));

        PrintJobList("=====All jobs: All Users=====================",
            TranslationJobStatus.GetAllJobs(sc, TranslationJobUserScope.AllUsers));

        PrintJobList("=====All jobs: Current Users=====================",
            TranslationJobStatus.GetAllJobs(sc, TranslationJobUserScope.CurrentUser));
    }

    /// <summary>
    /// Writes a section header followed by one line per job (id, name, submitted
    /// and canceled times).  Extracted to replace the four identical copies of
    /// this loop in the original.
    /// </summary>
    /// <param name="header">Section banner written before the job lines.</param>
    /// <param name="jobs">Jobs to print.</param>
    private static void PrintJobList(string header, ReadOnlyCollection<TranslationJobInfo> jobs)
    {
        Console.WriteLine(header);
        foreach (TranslationJobInfo jobInfo in jobs)
        {
            Console.WriteLine("JobId:" + jobInfo.JobId + ", JobName: " + jobInfo.Name +
                ", Submitted:" + jobInfo.SubmittedTime + ", Canceled:" + jobInfo.CancelTime);
        }
    }

    public by msdn  1354  0  6  0

    DownloadSync: Handles download jobs. Gets changes from the db using SqlSyncProvider and puts the resulting batch files in the blob store.

    Handles download jobs. Gets changes from the db using SqlSyncProvider and puts the resulting batch files in the blob store.
    using System;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.Linq;
    using System.Net;
    using System.Text;
    using System.Threading;
    using Microsoft.WindowsAzure.Diagnostics;
    using Microsoft.WindowsAzure.ServiceRuntime;
    using Microsoft.WindowsAzure.StorageClient;
    using Microsoft.WindowsAzure;
    using Microsoft.Synchronization.Data.SqlServer;
    using System.IO;
    using System.Runtime.Serialization.Formatters.Binary;
    using Microsoft.Synchronization.Data;
    using Microsoft.Synchronization;
    using AzureSyncServiceCommonUtility;
    using System.Data.SqlClient;
    
    // Gate object protecting updates to _currentJobs.
    private static object _jobCounterGate = new Object();
    // Number of sync jobs currently in flight; decremented under the gate in
    // DownloadSync's finally block.
    private static int _currentJobs = 0;
    
    /// <summary>
    /// Handles download jobs.  Gets changes from the db using SqlSyncProvider and puts the resulting batch files
    /// in the blob store.  On failure the job's blob-container status is set to Failed; the in-flight
    /// job counter is always decremented on exit.
    /// </summary>
    /// <param name="jobRef">A CloudQueueMessage whose string payload is the job id (cast inside).</param>
    private static void DownloadSync(object jobRef)
    {
        CloudQueue jobQueue = null;
        CloudBlobContainer blobContainer = null;
        string jobId = "Unknown";
        try
        {
            // Delete the job off the queue.  We delete the job now because if we fail after this we do not want it to appear
            // again on the queue, and instead just fail the job and force the client to retry.  This simplifies the service logic
            // because we don't have to handle resuming jobs that failed in the middle.  In a production service, this might be
            // worth the extra effort.
            CloudQueueMessage job = (CloudQueueMessage)jobRef;
            jobId = job.AsString;
            jobQueue = CommonUtil.GetJobQueue();
            jobQueue.DeleteMessage(job);
            // Get the job container for the job, containing any batchfiles and job metadata
            blobContainer = CommonUtil.GetBlobContainer(jobId);

            CommonUtil.SyncTrace(TraceCategory.Info, "DownloadSync - Starting download job:{0}", jobId);

            // Mark the job as in progress so clients polling the container metadata see the state change.
            blobContainer.FetchAttributes();
            blobContainer.Metadata["status"] = Enum.Format(typeof(SyncJobStatus), SyncJobStatus.PreparingBatches, "g");
            blobContainer.SetMetadata();

            // Get the local filesystem where we will spool our batchfiles before putting them in the blob store.
            // The limitation in SqlSyncProvider of requiring a local directory to spool to forces us to write batch
            // files twice - once to the local directory, and then when we copy them to the blob store so they can be accessed
            // by web roles.
            LocalResource fileSystem = RoleEnvironment.GetLocalResource("LS1");

            string localBatchingFolder = null;
            // NOTE(review): the "deployment" check presumably distinguishes a real Azure
            // deployment id from the local dev fabric — confirm.  In that branch only the
            // first two characters of the job id name the spool folder.
            if (RoleEnvironment.DeploymentId.Contains("deployment"))
            {
                localBatchingFolder = Path.Combine(fileSystem.RootPath, ((string)jobId).Substring(0, 2));
            }
            else
            {
                localBatchingFolder = Path.Combine(fileSystem.RootPath, (string)jobId);
            }

            SqlSyncProvider provider = null;
            System.Data.SqlClient.SqlConnection conn = null;

            try
            {
                conn = new System.Data.SqlClient.SqlConnection(RoleEnvironment.GetConfigurationSettingValue("SqlAzureConnectionString"));
                provider = new SqlSyncProvider(blobContainer.Metadata["scope"], conn);

                Directory.CreateDirectory(localBatchingFolder);
                provider.BatchingDirectory = localBatchingFolder;
                provider.MemoryDataCacheSize = CommonUtil.MaxServiceMemoryCacheSizeInKb;  //Service side cache size.
                provider.BeginSession(Microsoft.Synchronization.SyncProviderPosition.Remote, null);

                // Get the knowledge from the job container
                SyncKnowledge knowledge = SyncKnowledge.Deserialize(provider.IdFormats, blobContainer.GetBlobReference("destinationKnowledge").DownloadByteArray());
                uint batchSize = Convert.ToUInt32(blobContainer.Metadata["batchsize"]);  //Client side cache size.
                // NOTE(review): BinaryFormatter is unsafe for untrusted input (and removed in
                // .NET 9).  Here it serializes metadata this service produced itself, but a
                // safer serializer is worth considering.
                BinaryFormatter bf = new BinaryFormatter();
                object changeData = null;

                // Get the first changebatch
                ChangeBatch cb = provider.GetChangeBatch(batchSize, knowledge, out changeData);

                // Record whether the data is batched; "batchesready" stays false until every
                // batch file has been copied to the blob store.
                blobContainer.Metadata.Add("isbatched", ((DbSyncContext)changeData).IsDataBatched.ToString());
                blobContainer.Metadata.Add("batchesready", "false");

                //Use same change batch and change data to avoid transferring them for every batch. 
                CloudBlob changeBatchblob = blobContainer.GetBlobReference("changebatch");
                using (MemoryStream ms = new MemoryStream())
                {
                    bf.Serialize(ms, cb);
                    changeBatchblob.UploadByteArray(ms.ToArray());
                }

                CloudBlob changeDataBlob = blobContainer.GetBlobReference("changedata");
                using (MemoryStream ms = new MemoryStream())
                {
                    bf.Serialize(ms, changeData);
                    changeDataBlob.UploadByteArray(ms.ToArray());
                }

                if (((DbSyncContext)changeData).IsDataBatched == true)
                {
                    // If there are multiple batches, write the first one out to blob storage, and go get the rest
                    WriteBatchFileToBlob(blobContainer, (DbSyncContext)changeData);

                    while (!cb.IsLastBatch)
                    {
                        cb = provider.GetChangeBatch(batchSize, knowledge, out changeData);
                        WriteBatchFileToBlob(blobContainer, (DbSyncContext)changeData);
                    }
                }

                // Update the blob container metadata so that the client can start downloading the batches.
                blobContainer.Metadata["batchesready"] = "true";
                blobContainer.Metadata["status"] = Enum.Format(typeof(SyncJobStatus), SyncJobStatus.Complete, "g");
                blobContainer.SetMetadata();
            }
            finally
            {
                // Delete the local directory to which the provider spooled batch files.
                if (localBatchingFolder != null && Directory.Exists(localBatchingFolder))
                {
                    Directory.Delete(localBatchingFolder, true);
                }

                if (provider != null)
                {
                    provider.Dispose();
                }

                if (conn != null)
                {
                    conn.Dispose();
                }
            }

            CommonUtil.SyncTrace(TraceCategory.Info, "Worker role finished preparing batches for download job:{0}", jobId);
        }
        catch (Exception ex)
        {
            // Set the job to failed if exception thrown.
            CommonUtil.SyncTrace(TraceCategory.Error, "Worker role failed to prepare batches for download job:{0}, Exception:\r\n{1}", jobId, ex);

            if (blobContainer != null)
            {
                blobContainer.Metadata["status"] = Enum.Format(typeof(SyncJobStatus), SyncJobStatus.Failed, "g");
                blobContainer.SetMetadata();
            }

            if (ex is StorageException)
            {
                StorageException storageEx = (StorageException)ex;
                CommonUtil.SyncTrace(TraceCategory.Error, "Job id: {0} StorageClient exception with extra info:\r\n{0}", jobId, storageEx.ExtendedErrorInformation);
            }
        }
        finally
        {
            // Always release the slot in the in-flight job counter, success or failure.
            lock (_jobCounterGate)
            {
                _currentJobs--;
            }
        }
    }

    public by msdn  1340  0  5  0

    CheckAutoStartJobs: The check auto start jobs.

    The check auto start jobs.
    /// <summary>
    ///   Flag to determine if this is the first auto start.
    ///   NOTE(review): checked and set without synchronization in CheckAutoStartJobs() —
    ///   confirm it is only ever called from a single thread.
    /// </summary>
    private static bool autoStarted;
    
    #endregion
    
    #region Methods
    
    /// <summary>
    /// The check auto start jobs.
    /// </summary>
    /// <summary>
    /// The check auto start jobs.  On the first call only, reads the "AutoCount"
    /// app setting (expected format "count|delaySeconds") and submits a matching
    /// batch request to the batch worker service over a named pipe.  Returns
    /// silently when the setting is missing or malformed.
    /// </summary>
    private static void CheckAutoStartJobs()
    {
        // Only auto-start once per process (guard clause replaces the original
        // if/else).  NOTE(review): this flag is read and written without
        // synchronization — confirm single-threaded callers.
        if (autoStarted)
        {
            return;
        }
        autoStarted = true;

        // Wait a few seconds for the host to spin up
        Thread.Sleep(TimeSpan.FromSeconds(5));

        // Check for autostart
        var autoCount = ConfigurationManager.AppSettings["AutoCount"];

        if (string.IsNullOrWhiteSpace(autoCount))
        {
            return;
        }

        // Expected format: "<count>|<delay-in-seconds>".
        var values = autoCount.Split('|');

        // Check for valid value
        if (values.Length != 2)
        {
            return;
        }

        int count;
        if (!int.TryParse(values[0], out count))
        {
            return;
        }

        int delay;
        if (!int.TryParse(values[1], out delay))
        {
            return;
        }

        var request = new BatchRequest
            {
               CountTo = count, Delay = TimeSpan.FromSeconds(delay), StartAt = DateTime.Now, AutoStart = true 
            };

        var proxy = new BatchWorkerClient(
            new NetNamedPipeBinding(), new EndpointAddress("net.pipe://localhost/BatchWeb/BatchWorker.xamlx"));
        try
        {
            // The response is not used; the original assigned it to an unused local.
            proxy.SubmitJob(request);
            proxy.Close();
        }
        catch
        {
            // Close() can itself fault; Abort() tears the channel down before rethrowing.
            proxy.Abort();
            throw;
        }
    }

    public by msdn  1112  0  6  0

    UploadSync: Handles upload jobs. Retrieves the batch files containing the changes that were uploaded by the client and applies them using SqlSyncProvider.

    Handles upload jobs. Retrieves the batch files containing the changes that were uploaded by the client and applies them using SqlSyncProvider.
    using System;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.Linq;
    using System.Net;
    using System.Text;
    using System.Threading;
    using Microsoft.WindowsAzure.Diagnostics;
    using Microsoft.WindowsAzure.ServiceRuntime;
    using Microsoft.WindowsAzure.StorageClient;
    using Microsoft.WindowsAzure;
    using Microsoft.Synchronization.Data.SqlServer;
    using System.IO;
    using System.Runtime.Serialization.Formatters.Binary;
    using Microsoft.Synchronization.Data;
    using Microsoft.Synchronization;
    using AzureSyncServiceCommonUtility;
    using System.Data.SqlClient;
    
    // Gate object protecting updates to _currentJobs.
    private static object _jobCounterGate = new Object();
    // Number of sync jobs currently in flight; decremented under the gate in
    // UploadSync's finally block.
    private static int _currentJobs = 0;
    
    /// <summary>
    /// Handles upload jobs.  Retrieves the batch files containing the changes that were uploaded by the client
    /// and applies them using SqlSyncProvider.  On failure the job's blob-container status is set to Failed;
    /// the in-flight job counter is always decremented on exit.
    /// </summary>
    /// <param name="jobRef">A CloudQueueMessage whose string payload is the job id (cast inside).</param>
    private static void UploadSync(object jobRef)
    {
        CloudQueue jobQueue = null;
        CloudBlobContainer blobContainer = null;
        string jobId = "Unknown";

        try
        {
            // Delete the job off the queue.  We delete the job now because if we fail after this we do not want it to appear
            // again on the queue, and instead just fail the job and force the client to retry.  This simplifies the service logic
            // because we don't have to handle resuming jobs that failed in the middle.  In a production service, this might be
            // worth the extra effort.
            CloudQueueMessage job = (CloudQueueMessage)jobRef;
            jobId = job.AsString;
            jobQueue = CommonUtil.GetJobQueue();
            jobQueue.DeleteMessage(job);
            // Get the job container for the job, containing any batchfiles and job metadata
            blobContainer = CommonUtil.GetBlobContainer(jobId);

            CommonUtil.SyncTrace(TraceCategory.Info, "UploadSync - Starting upload job:{0}", jobId);                

            // Mark the job as in progress so clients polling the container metadata see the state change.
            blobContainer.FetchAttributes();
            blobContainer.Metadata["status"] = Enum.Format(typeof(SyncJobStatus), SyncJobStatus.ApplyingBatches, "g");
            blobContainer.SetMetadata();                


            // We have to copy the batch files locally first from the blob store before applying them, so get the local file store.
            // The limitation in SqlSyncProvider of requiring a local directory to spool from forces us to write batch
            // files twice - when the web role copies them to the blob store so they can be accessed by the worker role, and
            // then to the local directory so the provider can apply them. 
            LocalResource fileSystem = RoleEnvironment.GetLocalResource("LS1");

            // Get the changedata blob
            // NOTE(review): BinaryFormatter deserialization of blob contents — unsafe for
            // untrusted data (and removed in .NET 9); here the blob is written by this
            // service's web role, but confirm and consider a safer serializer.
            CloudBlob blob = blobContainer.GetBlobReference("changedata");
            DbSyncContext changeData;
            using (MemoryStream ms = new MemoryStream(blob.DownloadByteArray()))
            {
                BinaryFormatter bf = new BinaryFormatter();
                changeData = (DbSyncContext)bf.Deserialize(ms);
            }

            string localBatchingFolder = null;               
            System.Data.SqlClient.SqlConnection conn = null;
            SqlSyncProvider provider = null;
            try
            {
                // Only batched uploads need a local spool directory.
                if (changeData.IsDataBatched)
                {
                    // NOTE(review): the "deployment" check presumably distinguishes a real
                    // Azure deployment id from the local dev fabric — confirm.
                    if (RoleEnvironment.DeploymentId.Contains("deployment"))
                    {
                        localBatchingFolder = Path.Combine(fileSystem.RootPath, ((string)jobId).Substring(0, 2));
                    }
                    else
                    {
                        localBatchingFolder = Path.Combine(fileSystem.RootPath, (string)jobId);
                    }
                    Directory.CreateDirectory(localBatchingFolder);
                }

                conn = new System.Data.SqlClient.SqlConnection(RoleEnvironment.GetConfigurationSettingValue("SqlAzureConnectionString"));                  
                provider = new SqlSyncProvider(blobContainer.Metadata["scope"], conn);

                if (changeData.IsDataBatched)
                {
                    provider.BatchingDirectory = localBatchingFolder;
                }

                // Here is where the max transaction size should be set if the service is running into throttling with Sql Azure
                // provider.ApplicationTransactionSize = ???

                provider.BeginSession(Microsoft.Synchronization.SyncProviderPosition.Remote, null);

                // GetSyncBatchParameters only needs to be called because it does some setup in the provider.
                uint batchSizeNotUsed;
                SyncKnowledge knowledgeNotUsed = new SyncKnowledge();
                provider.GetSyncBatchParameters(out batchSizeNotUsed, out knowledgeNotUsed);

                SyncSessionStatistics syncStats = new SyncSessionStatistics();
                if (changeData.IsDataBatched)
                {
                    // Batched path: download each batch file locally and feed it to the
                    // provider in turn, marking the final one as the last batch.
                    CloudBlobDirectory batchFilesDirectory = blobContainer.GetDirectoryReference("batchfiles");
                    IEnumerable<IListBlobItem> batchFileUris = batchFilesDirectory.ListBlobs();

                    CommonUtil.SyncTrace(
                        TraceCategory.Info, "Worker role is going to apply:{0} batches from blob container:{1}", batchFileUris.Count(), blobContainer.Uri); ;
                            
                    int currentIndex = 0;
                    int total = batchFileUris.Count();
                    foreach (IListBlobItem batchFileUri in batchFileUris)
                    {
                        currentIndex++;
                        // NOTE(review): an absolute blob URI is passed to GetBlobReference —
                        // the StorageClient library accepts this for blobs in this container; confirm.
                        CloudBlob batchFile = blobContainer.GetBlobReference(batchFileUri.Uri.AbsoluteUri);
                        changeData.IsLastBatch = false;
                        string localBatchFile = Path.Combine(localBatchingFolder, Path.GetFileName(batchFileUri.Uri.AbsolutePath));
                        changeData.BatchFileName = localBatchFile;

                        ChangeBatch cb = new ChangeBatch(provider.IdFormats, new SyncKnowledge(), new ForgottenKnowledge());
                        //CloudBlob batchFile = blobContainer.GetBlobReference(batches[i]);
                        // Grab the blob file and write it locally so the provider can process it
                        batchFile.DownloadToFile(localBatchFile);

                        if (currentIndex == total)
                        {
                            changeData.IsLastBatch = true;
                            cb.SetLastBatch();
                        }

                        provider.ProcessChangeBatch(ConflictResolutionPolicy.ApplicationDefined, cb, changeData, new SyncCallbacks(), syncStats);
                        CommonUtil.SyncTrace(TraceCategory.Verbose, "Worker role applied change batch {0}", Path.GetFileName(batchFileUri.Uri.AbsolutePath));
                    }
                }
                else
                {
                    // Non-batched path: everything is in changeData, so a single
                    // last-batch ChangeBatch applies it all.
                    ChangeBatch cb = new ChangeBatch(provider.IdFormats, new SyncKnowledge(), new ForgottenKnowledge());
                    cb.SetLastBatch();
                    provider.ProcessChangeBatch(ConflictResolutionPolicy.ApplicationDefined, cb, changeData, new SyncCallbacks(), syncStats);
                    CommonUtil.SyncTrace(TraceCategory.Verbose, "Worker role applied change batch for non-batching sync");
                }

                // Write stats blob
                blobContainer.Metadata.Add("changesapplied", syncStats.ChangesApplied.ToString());
                blobContainer.Metadata.Add("changesfailed", syncStats.ChangesFailed.ToString());
                blobContainer.Metadata["status"] = Enum.Format(typeof(SyncJobStatus), SyncJobStatus.Complete, "g"); ;
                blobContainer.SetMetadata();
            }
            finally
            {
                // Delete the local directory to which the batch files were spooled.
                if (localBatchingFolder != null && Directory.Exists(localBatchingFolder))
                {
                    Directory.Delete(localBatchingFolder, true);
                }

                if (provider != null)
                {
                    provider.Dispose();
                }

                if (conn != null)
                {
                    conn.Dispose();
                }
            }

            CommonUtil.SyncTrace(TraceCategory.Info, "Worker role finished applying batches for upload job:{0}", jobId);
        }
        catch (Exception ex)
        {
            // Set the job to failed if exception thrown.
            CommonUtil.SyncTrace(
                TraceCategory.Error, "Worker role failed to apply batches for upload job:{0}, Exception:\r\n{1}", jobId, ex);

            if (blobContainer != null)
            {
                blobContainer.Metadata["status"] = Enum.Format(typeof(SyncJobStatus), SyncJobStatus.Failed, "g");
                blobContainer.SetMetadata();
            }

            if (ex is StorageException)
            {
                StorageException storageEx = (StorageException)ex;
                CommonUtil.SyncTrace(TraceCategory.Error, "Job id: {0} StorageClient exception with extra info:\r\n{0}", jobId, storageEx.ExtendedErrorInformation);
            }
        }
        finally
        {
            // Always release the slot in the in-flight job counter, success or failure.
            lock (_jobCounterGate)
            {
                _currentJobs--;
            }
        }
    }

    public by msdn  1415  0  6  0

    ListJobsAndAssets

    // List all jobs on the server, and for each job, also all assets
    using System;
    using System.Configuration;
    using System.Globalization;
    using System.IO;
    using System.Net;
    using System.Runtime.Serialization.Json;
    using System.Text;
    using System.Threading;
    using System.Threading.Tasks;
    using System.Web;
    using System.Xml;
    using System.Linq;
    using Microsoft.WindowsAzure;
    using Microsoft.WindowsAzure.MediaServices.Client;
    using Microsoft.WindowsAzure.Storage;
    using Microsoft.WindowsAzure.Storage.Blob;
    using Microsoft.WindowsAzure.Storage.Auth;
    using System.Collections.Generic;
    using System.Reflection;
    
    // Class-level field used to keep a reference to the service context.
    private static CloudMediaContext _context = null;
    
    // List all jobs on the server, and for each job, also all assets
    // List all jobs on the server, and for each job, also all assets
    static void ListJobsAndAssets()
    {
        // Warn the user up front: enumerating _context.Jobs queries the
        // Media Services account and can take a while.
        Console.Write(
            "Building the list. This may take a few "
            + "seconds to a few minutes depending on how many assets "
            + "you have."
            + Environment.NewLine + Environment.NewLine
            + "Please wait..."
            + Environment.NewLine);

        // Accumulate the full report and write it to the console in one go.
        var report = new StringBuilder();

        foreach (IJob job in _context.Jobs)
        {
            // --- Job header ---
            report.AppendLine(string.Empty);
            report.AppendLine("******JOB*******");
            report.AppendLine($"Job ID: {job.Id}");
            report.AppendLine($"Name: {job.Name}");
            report.AppendLine($"State: {job.State}");
            report.AppendLine($"Order: {job.Priority}");
            report.AppendLine("==============");

            // --- Tasks (a job has one or more tasks) ---
            report.AppendLine("******TASKS*******");
            foreach (ITask task in job.Tasks)
            {
                report.AppendLine($"Task Id: {task.Id}");
                report.AppendLine($"Name: {task.Name}");
                report.AppendLine($"Progress: {task.Progress}");
                report.AppendLine($"Configuration: {task.Configuration}");
                if (task.ErrorDetails != null)
                {
                    report.AppendLine($"Error: {task.ErrorDetails}");
                }
                report.AppendLine("==============");
            }

            // --- Input media assets for this job ---
            report.AppendLine("******JOB INPUT MEDIA ASSETS*******");
            foreach (IAsset input in job.InputMediaAssets)
            {
                if (input != null)
                {
                    report.AppendLine($"Input Asset Id: {input.Id}");
                    report.AppendLine($"Name: {input.Name}");
                    report.AppendLine("==============");
                }
            }

            // --- Output media assets for this job ---
            report.AppendLine("******JOB OUTPUT MEDIA ASSETS*******");
            foreach (IAsset output in job.OutputMediaAssets)
            {
                if (output != null)
                {
                    report.AppendLine($"Output Asset Id: {output.Id}");
                    report.AppendLine($"Name: {output.Name}");
                    report.AppendLine("==============");
                }
            }
        }

        // Display output in console.
        Console.Write(report.ToString());
    }

    public by msdn  1256  0  6  0

    DeleteJob

    // Deletes a job based on its state. // You can create a loop to call this method and delete all jobs: // foreach(IJob job in _context.Jobs) // DeleteJob(job.Id) // **Warning: if you call this method in a foreach loop as above, do so // with caution as it will try to delete all jobs in an account.
    using System;
    using System.Configuration;
    using System.Globalization;
    using System.IO;
    using System.Net;
    using System.Runtime.Serialization.Json;
    using System.Text;
    using System.Threading;
    using System.Threading.Tasks;
    using System.Web;
    using System.Xml;
    using System.Linq;
    using Microsoft.WindowsAzure;
    using Microsoft.WindowsAzure.MediaServices.Client;
    using Microsoft.WindowsAzure.Storage;
    using Microsoft.WindowsAzure.Storage.Blob;
    using Microsoft.WindowsAzure.Storage.Auth;
    using System.Collections.Generic;
    using System.Reflection;
    
    // Deletes a job based on its state, polling until deletion succeeds.
    // You can create a loop to call this method and delete all jobs:
    //   foreach(IJob job in _context.Jobs)
    //       DeleteJob(job.Id)
    // **Warning: if you call this method in a foreach loop as above, do so
    // with caution as it will try to delete all jobs in an account.
    //
    // jobId: the server-side identifier of the job to delete.
    static void DeleteJob(string jobId)
    {
        // Poll interval between state checks; keeps the loop from spinning
        // and from hammering the service with repeated requests.
        const int PollIntervalMilliseconds = 5000;

        bool jobDeleted = false;

        while (!jobDeleted)
        {
            // Get an updated job reference so we observe the latest state.
            IJob job = GetJob(jobId);

            // Check and handle various possible job states. You can
            // only delete a job whose state is Finished, Error, or Canceled.
            // You can cancel jobs that are Queued, Scheduled, or Processing,
            // and then delete after they are canceled.
            switch (job.State)
            {
                case JobState.Finished:
                case JobState.Canceled:
                case JobState.Error:
                    // Job errors should already be logged by the StateChanged event
                    // handling method.
                    // You can also call job.DeleteAsync to do async deletes.
                    job.Delete();
                    Console.WriteLine("Job has been deleted.");
                    jobDeleted = true;
                    break;
                case JobState.Canceling:
                    Console.WriteLine("Job is cancelling and will be deleted "
                        + "when finished.");
                    Console.WriteLine("Wait while job finishes canceling...");
                    Thread.Sleep(PollIntervalMilliseconds);
                    break;
                case JobState.Queued:
                case JobState.Scheduled:
                case JobState.Processing:
                    job.Cancel();
                    Console.WriteLine("Job is scheduled or processing and will "
                        + "be deleted.");
                    // Give the service time to transition the job to Canceling.
                    // Without this delay, the original loop re-issued Cancel()
                    // in a tight spin until the state changed.
                    Thread.Sleep(PollIntervalMilliseconds);
                    break;
                default:
                    // Unknown/unexpected state: wait before polling again so
                    // the loop does not busy-spin at 100% CPU.
                    Thread.Sleep(PollIntervalMilliseconds);
                    break;
            }

        }
    }
    • Public Snippets
    • Channels Snippets