
Friday, May 24, 2013

ASP.NET NLog Sql Server Logging and Error Handling Part 2

Be sure to check out Part 1 as this post builds upon it and the two go hand in hand.

Series Overview

Part 1 - Setting up logging with ASP.NET MVC, NLog and SQL Server

Part 2 - Unhandled exception processing, building an error report, emailing errors, and custom error pages.

Custom Error Handling Attribute

Registered in FilterConfig.RegisterGlobalFilters and bound in DiagnosticsModule, AppErrorHandlerAttribute reports the unhandled exception and sets the error view shown to the end user. An enableErrorPages appSetting controls whether any of this happens; turning it off can be desirable for local debugging or on a dev web server.
namespace NLogSql.Web.Infrastructure.ErrorHandling
{
    public class AppErrorHandlerAttribute : FilterAttribute, IExceptionFilter
    {
        [Inject]
        public IErrorReporter Reporter { get; set; }

        public void OnException(ExceptionContext exceptionContext)
        {
            if (exceptionContext.ExceptionHandled) return;

            if (ConfigurationManager.AppSettings["enableErrorPages"] == "false")
            {
                AppLogFactory.Create<AppErrorHandlerAttribute>().Error(
                    "Unexpected error. enableErrorPages is false, skipping detailed "
					+ "error gathering. Error was: {0}",
                    exceptionContext.Exception.ToString());
                return;
            }

            Ensure.That(Reporter, "Reporter").IsNotNull();
            Reporter.ReportException(exceptionContext);

            SetErrorViewResult(exceptionContext);
        }

        private static void SetErrorViewResult(ExceptionContext exceptionContext)
        {
            var statusCode = new HttpException(null, exceptionContext.Exception)
				.GetHttpCode();

            exceptionContext.Result = new ViewResult
            {
                ViewName = MVC.Shared.Views.ViewNames.Error,
                TempData = exceptionContext.Controller.TempData,
                //ViewData = new ViewDataDictionary<ErrorModel>(new ErrorModel())
            };

            exceptionContext.ExceptionHandled = true;
            exceptionContext.HttpContext.Response.Clear();
            exceptionContext.HttpContext.Response.StatusCode = statusCode;
            exceptionContext.HttpContext.Response.TrySkipIisCustomErrors = true;
        }
    }
}
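For reference, a minimal sketch of the FilterConfig registration mentioned above, assuming the stock FilterConfig template (in the repo the attribute is also bound through Ninject's BindFilter call shown in Part 1's DiagnosticsModule, so the exact wiring may differ):
using System.Web.Mvc;
using NLogSql.Web.Infrastructure.ErrorHandling;

public class FilterConfig
{
    public static void RegisterGlobalFilters(GlobalFilterCollection filters)
    {
        // report unhandled exceptions and swap in the friendly error view
        filters.Add(new AppErrorHandlerAttribute());
    }
}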

Logging and Reporting the Error

In the same namespace, the ErrorReporter class generates the error report, then logs and emails it. The overload with customActivityMessage would generally be used with handled exceptions where it may still be desirable to report the exception.
public class ErrorReporter : IErrorReporter
{
	private readonly ILog _log;

	public ErrorReporter(ILog log)
	{
		_log = Ensure.That(log, "log").IsNotNull().Value;
	}

	private string CustomActivityMessage { get; set; }

	public void ReportException(ControllerContext controllerContext, 
		Exception exception, string customActivityMessage = null)
	{
		this.CustomActivityMessage = customActivityMessage;
		ReportException(new ExceptionContext(controllerContext, exception));
	}

	public void ReportException(ExceptionContext exceptionContext)
	{
		var errorInfo = new ErrorReportInfo(exceptionContext, this.CustomActivityMessage);
		errorInfo.Generate();
		_log.Error("Unexpected error: {0}", errorInfo.ReportText);

		if (errorInfo.Errors.Any())
			_log.Error("Error generating error report. Original exception: {0}", 
				exceptionContext.Exception);

		// sending mail can be a little slow, don't delay end user seeing error page
		Task.Factory.StartNew(
		state =>
		{
			var errorReport = (ErrorReportInfo)state;
			DependencyResolver.Current.GetService<IErrorEmailer>()
				.SendErrorEmail(errorReport);
		},
		errorInfo).ContinueWith(t =>
		{
			if (null != t.Exception)
				_log.Error("Error sending email: " + t.Exception.ToString());    
		});
	}
}

Building the Error Report

Various diagnostic info classes are responsible for building different diagnostic "sub reports". Each inherits from DiagnosticInfoBase which is a glorified StringBuilder, with functionality to build both a plain text version of the report (logged to DB) as well as an HTML formatted version (used for emails).

The base class has safe appending and formatting functionality and ensures that any error in generating a part of the report doesn't stop the whole process.
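The real base class is in the repo; a rough sketch of its shape (member names beyond StartTable/AppendRow/EndTable/GenerateReport are assumptions) might look like this:
using System;
using System.Collections.Generic;
using System.Text;

public abstract class DiagnosticInfoBase
{
    private readonly StringBuilder _text = new StringBuilder();
    private readonly StringBuilder _html = new StringBuilder();

    protected DiagnosticInfoBase()
    {
        Errors = new List<Exception>();
    }

    public string ReportText { get { return _text.ToString(); } }
    public string ReportHtml { get { return _html.ToString(); } }
    public List<Exception> Errors { get; private set; }

    // a failure building this section is recorded but doesn't abort the whole report
    public void Generate()
    {
        try
        {
            GenerateReport();
        }
        catch (Exception ex)
        {
            Errors.Add(ex);
        }
    }

    protected abstract void GenerateReport();

    protected void StartTable() { _html.Append("<table>"); }
    protected void EndTable() { _html.Append("</table>"); }

    protected void AppendRow(string name, string value)
    {
        _text.AppendFormat("{0}: {1}{2}", name, value, Environment.NewLine);
        _html.AppendFormat("<tr><td>{0}</td><td>{1}</td></tr>", name, value);
    }
}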
A sample implementation:
public class FormInfo : DiagnosticInfoBase
{
	private readonly HttpRequestBase _request;

	public FormInfo(HttpRequestBase request)
	{
		_request = request;
	}

	protected override void GenerateReport()
	{
		StartTable();
		var keys = _request.Form.AllKeys.OrderBy(s => s).ToList();

		foreach (var name in keys)
		{
			var value = _request.Form[name];

			if (null != value && name.Contains("password", StringComparison.OrdinalIgnoreCase))
			{
				value = new string('*', value.Length);
			}

			AppendRow(name, value);
		}

		EndTable();
	}
}
The ErrorReportInfo class combines the output of each section for the full error report.
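ErrorReportInfo itself isn't reproduced here; conceptually it is a thin aggregator over the section classes. A hypothetical outline (the real class also exposes the Server and Location info used later by the emailer):
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web.Mvc;

public class ErrorReportInfo
{
    private readonly List<DiagnosticInfoBase> _sections;

    public ErrorReportInfo(ExceptionContext exceptionContext, string customActivityMessage = null)
    {
        var request = exceptionContext.HttpContext.Request;

        // one section per diagnostic area; FormInfo shown above, the rest omitted.
        // customActivityMessage would be folded into an activity section (omitted here).
        _sections = new List<DiagnosticInfoBase>
        {
            new FormInfo(request),
            // exception, server, location, cookie, header sections, etc.
        };
    }

    public string ReportText { get; private set; }
    public string ReportHtml { get; private set; }

    public IEnumerable<Exception> Errors
    {
        get { return _sections.SelectMany(s => s.Errors); }
    }

    public void Generate()
    {
        foreach (var section in _sections)
        {
            section.Generate();
        }

        ReportText = string.Join(Environment.NewLine, _sections.Select(s => s.ReportText));
        ReportHtml = string.Concat(_sections.Select(s => s.ReportHtml));
    }
}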



In the end this produces a sample HTML email report like this.

Sending the Email

Sending the email logs the result on success or failure:
public interface IErrorEmailer
{
	void SendErrorEmail(ErrorReportInfo errorInfo);
}

public class ErrorEmailer : IErrorEmailer
{
	private readonly IMailer _mailer;
	private readonly ILog _log;

	public ErrorEmailer(IMailer mailer, ILog log)
	{
		_mailer = Ensure.That(mailer, "mailer").IsNotNull().Value;
		_log = Ensure.That(log, "log").IsNotNull().Value;
	}

	public void SendErrorEmail(ErrorReportInfo errorInfo)
	{
		try
		{
			var subject = string.Format("{0} Error", 
				Assembly.GetExecutingAssembly().GetName().Name);

			if (null != errorInfo.Server && null != errorInfo.Location
				&& !string.IsNullOrWhiteSpace(errorInfo.Location.ControllerAction)
				&& !string.IsNullOrWhiteSpace(errorInfo.Server.HostName))
			{
				subject = string.Format("{0}: {1} - {2}", subject, 
					errorInfo.Server.HostName, errorInfo.Location.ControllerAction);
			}

			var to = AppSettings.Default.Email.ErrorMessageTo;
			_mailer.SendMail(to, subject, errorInfo.ReportHtml);

			_log.Info("Sent email: {0} to {1}", subject, to);
		}
		catch (Exception ex)
		{
			_log.Error("Error sending error report email: {0}", ex);
		}
	}
}
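The IMailer dependency isn't shown in this post. The sketch below is one plausible shape for it, assuming a thin wrapper over SmtpClient that picks up the server and from address from the system.net/mailSettings config section:
using System.Net.Mail;

public interface IMailer
{
    void SendMail(string to, string subject, string htmlBody);
}

public class Mailer : IMailer
{
    public void SendMail(string to, string subject, string htmlBody)
    {
        // SmtpClient and MailMessage pull host/port/from out of <system.net><mailSettings>
        using (var client = new SmtpClient())
        using (var message = new MailMessage())
        {
            message.To.Add(to);
            message.Subject = subject;
            message.Body = htmlBody;
            message.IsBodyHtml = true;
            client.Send(message);
        }
    }
}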

The Error View

At the end of AppErrorHandlerAttribute, SetErrorViewResult renders the Views\Shared\Error.cshtml page. The text, images, and styles it shows vary according to the HTTP status code. Separate error pages might have advantages if there were a larger number of differences, but a single page was less work here.
@section styles{
    <link rel="stylesheet" href="~/Content/Error.css"/>
}

@{
    var statusTitleMap = new Dictionary<int, string> {
             {404, "Something got lost in the shuffle"},
             {410, "Gone like yesterday"},
             {500, "Something go boom"}
         };
}

@section hero{
    <div id="error-body" class="container-fluid">
        <div class="row-fluid">
            <div class="span3">
                @{ var errorClass = (Response.StatusCode == 404 || Response.StatusCode == 410) ? "_" + Response.StatusCode : "_500";}
                <div id="errorImageBlock" class="@errorClass"></div>
            </div>
    
            <div class="span8 offset1">
                <div class="row-fluid">
                    <div class="span12 text-center" id="status-code">
                        @Response.StatusCode
                    </div>
                </div>
                <div class="row-fluid">
                    <div class="span12" id="well-this">
                        @statusTitleMap[Response.StatusCode]
                    </div>
                </div>
        
                <div class="row-fluid">
                    <div class="span11" id="message">
                    @switch (Response.StatusCode)
                    {
                        case 404:
                            @:We cannot find the page you are looking for. If you typed in the address, double check the spelling. If you got here by clicking a link, 
                            @: <a href="mailto:customerservice@domain.com?subject=Page%20Not%20Found">let us know</a>.
                            break;
                        case 410:
                            @:The page you are looking for is gone (permanently). If you feel you reached this page incorrectly, <a href="mailto:customerservice@domain.com?subject=Link%20Gone">let us know</a>.
                            break;
                        case 500:
                            @:Oh dear, something's gone wrong. Our team has already been alerted to the problem and will fix it as soon as possible! 
                            break;
                    }  
                    </div>
                </div>
            </div>
        </div>
    </div>
}

Testing Errors

At the bottom of the home page are some links to test out error functionality.





Handling Not Found and Gone Permanently

404s have some special handling in Global.asax.cs:
protected void Application_EndRequest()
{
	if (Context.Response.StatusCode == 404)
	{
		if (Request.RequestContext.RouteData.Values["fake404"] == null)
		{
			Response.Clear();

			var rd = new RouteData();
			rd.Values["controller"] = MVC.Error.Name;
			rd.Values["action"] = MVC.Error.ActionNames.NotFound;

			var c = (IController)DependencyResolver.Current.GetService<ErrorController>();
			Request.RequestContext.RouteData = rd;
			c.Execute(new RequestContext(new HttpContextWrapper(Context), rd));
		}
	}
}
That code makes me itch a bit and could be done better, but there is a reason for it. If you're wondering why not just use custom error pages, the answer is that doing so for a 404 ends up producing a 301 redirect and only then a 404 on the custom not found page. For SEO purposes that is usually considered a bad practice. If you don't have a public site or as much SEO concern, that may be acceptable, but in this case it wasn't.

Error Controller

The error controller sets and logs the response status codes and returns the error view. For the Gone action, usually there'd be routes defined for legacy URLs that would direct to that action.
public partial class ErrorController : Controller
{
	private readonly ILog _log;

	public ErrorController(ILog log)
	{
		_log = Ensure.That(log, "log").IsNotNull().Value;
	}

	public virtual ActionResult NotFound()
	{
		Response.StatusCode = (int)HttpStatusCode.NotFound;
		RouteData.Values["fake404"] = true;
		_log.Write(LogType.Warn, new { Code = "404" }, 
			"404 Not Found for {0}", Request.Url);
		return View("Error");
	}

	public virtual ActionResult Gone()
	{
		Response.StatusCode = (int)HttpStatusCode.Gone;
		Response.Status = "410 Gone";
		Response.TrySkipIisCustomErrors = true;
		_log.Write(LogType.Warn, new {Code = "410"}, 
			"410 gone permanently for {0}", Request.Url);
		return View("Error");
	}
}
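A hypothetical example of such a legacy route, registered ahead of the default route in RouteConfig (the URL pattern is made up for illustration):
public static void RegisterRoutes(RouteCollection routes)
{
    // retired URLs return a proper 410 via ErrorController.Gone
    routes.MapRoute(
        "LegacyCatalog",
        "catalog/old/{id}",
        new { controller = MVC.Error.Name, action = MVC.Error.ActionNames.Gone, id = UrlParameter.Optional });

    // ... the standard default route follows ...
}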

Future Enhancements

  • The diagnostic info report building classes are pretty quick and messy in spots and could use cleanup.
  • Error view work including better responsive design and user-acceptable images :)
  • Admin error view to inspect log data and errors
  • Use of and integration with services such as appfail.net and New Relic to monitor errors and performance. This app used those tools in conjunction with the custom error handling and logging functionality.

The Code

The code for this series is available at https://github.com/thnk2wn/NLogSql.Web
Monday, May 20, 2013

ASP.NET NLog Sql Server Logging and Error Handling Part 1

It was the eleventh hour before a web app was to go live and there was nothing in place for error handling or logging. You're shocked I'm sure (that's my sarcastic voice). Normally with web apps in the past I've used ELMAH for unhandled error logging and NLog with text file targets for other logging.

ELMAH works great but in this case I was told it was pulled for some reason. With no time to find out the story there, I threw together something to capture basic error details and send an email. Later that grew and was customized, and ELMAH never returned, though perhaps it should have. Still, there are benefits to doing this yourself, including more customization, less bloat, and less hunting through documentation that may be lacking.

On the logging front I still wanted to use NLog but text file logging was no longer a fit. This app had a few instances behind a load balancer and central logging was needed plus it needed a higher volume of logging in some troublesome workflows, more so than the occasional errors or warnings here and there. I had not used NLog with SQL Server before but this seemed like the perfect time to do so.

Series Overview

Part 1 - Setting up logging with ASP.NET MVC, NLog and SQL Server

Part 2 - Unhandled exception processing, building an error report, emailing errors, and custom error pages.

Setup and Configuration

I began by installing the NLog NuGet package and the NLog.Extended package (for the ASP.NET-specific layout renderers) into the ASP.NET MVC 4 project. The NLog.Config package is useful for a standalone NLog config file, but for web apps with existing config transforms I find embedding the configuration in Web.config easier.

In web.config under configuration/configSections, the NLog section is defined:
	<section name="nlog" type="NLog.Config.ConfigSectionHandler, NLog" />
Under <configuration> my initial NLog config looked like the below, using the async attribute on targets to make each target work asynchronously:
<nlog xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" throwExceptions="true" 
    internalLogLevel="Debug" internalLogFile="${basedir}/NlogInternal.log">
    <!-- go to http://nlog-project.org/wiki/Configuration_file for more information -->
    <extensions>
      <add assembly="NLog.Extended" />
    </extensions>
    <targets async="true">
      <target xsi:type="Database" name="dbTarget" connectionStringName="Logger" 
	  commandText="exec sLogEvent_Insert @time_stamp, @level, @logger, @userName, @url, 
	    @machineName, @sessionId, @threadId, @referrer, @userAgent, @code, @message">
        <parameter name="@time_stamp" layout="${date}" />
        <parameter name="@level" layout="${level}" />
        <parameter name="@logger" layout="${logger}" />
        <parameter name="@userName" layout="${identity}" />
        <parameter name="@url" layout="${aspnet-request:serverVariable=Url}" />
        <parameter name="@machineName" layout="${machinename}" />
        <parameter name="@sessionId" layout="${aspnet-sessionid}" />
        <parameter name="@threadId" layout="${threadid}" />
        <parameter name="@referrer" 
			layout="${aspnet-request:serverVariable=HTTP_REFERER}" />
        <parameter name="@userAgent" 
			layout="${aspnet-request:serverVariable=HTTP_USER_AGENT}" />
		<parameter name="@code" layout="${event-context:item=Code}" />
        <parameter name="@message" layout="${message}" />
      </target>
      <target name="debugTarget" xsi:type="Debugger" 
		layout="${time}|${level:uppercase=true}|${logger}|${message}" />
    </targets>
    <rules>
      <!-- Levels: Off, Trace, Debug, Info, Warn, Error, Fatal -->
      <logger name="*" minlevel="Trace" writeTo="debugTarget,dbTarget" />
    </rules>
</nlog>
Later I swapped to explicit AsyncWrapper targets, which gave me more control over the async behavior and mirrored what actually happens at runtime, at the cost of being more verbose.
<nlog xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" throwExceptions="true" 
    internalLogLevel="Debug" internalLogFile="${basedir}/NlogInternal.log">
    <!-- go to http://nlog-project.org/wiki/Configuration_file for more information -->
    <extensions>
      <add assembly="NLog.Extended" />
    </extensions>
    <targets>
      <target name="asyncDbWrapperTarget" xsi:type="AsyncWrapper" queueLimit="10000" 
	  timeToSleepBetweenBatches="50" batchSize="100" overflowAction="Block">
        <target xsi:type="Database" name="dbTarget" connectionStringName="Logger" 
		commandText="exec sLogEvent_Insert @time_stamp, @level, @logger, @userName, @url, 
		@machineName, @sessionId, @threadId, @referrer, @userAgent, @code, @message">
          <parameter name="@time_stamp" layout="${date}" />
          <parameter name="@level" layout="${level}" />
          <parameter name="@logger" layout="${logger}" />
          <parameter name="@userName" layout="${identity}" />
          <parameter name="@url" layout="${aspnet-request:serverVariable=Url}" />
          <parameter name="@machineName" layout="${machinename}" />
          <parameter name="@sessionId" layout="${aspnet-sessionid}" />
          <parameter name="@threadId" layout="${threadid}" />
          <parameter name="@referrer" 
			layout="${aspnet-request:serverVariable=HTTP_REFERER}" />
          <parameter name="@userAgent" 
			layout="${aspnet-request:serverVariable=HTTP_USER_AGENT}" />
          <parameter name="@code" layout="${event-context:item=Code}" />
          <parameter name="@message" layout="${message}" />
        </target>
      </target>
      <target name="asyncDebugWrapperTarget" xsi:type="AsyncWrapper" queueLimit="10000" 
	  timeToSleepBetweenBatches="50" batchSize="100" overflowAction="Block">
        <target name="debugTarget" xsi:type="Debugger" 
			layout="${time}|${level:uppercase=true}|${logger}|${message}" />
      </target>
    </targets>
    <rules>
      <!-- Levels: Off, Trace, Debug, Info, Warn, Error, Fatal -->
      <logger name="*" minlevel="Trace" 
		writeTo="asyncDebugWrapperTarget,asyncDbWrapperTarget" />
    </rules>
 </nlog>
The database target's connectionStringName value of Logger points to the DB connection string to use.
<connectionStrings>
<add name="Logger" providerName="System.Data.SqlClient" 
   connectionString="Data Source=(local);Initial Catalog=Chinook.Logs;Persist Security Info=True;User ID=ChinookLogger;Password=L0gger!" />
</connectionStrings>

The Database

Our DBA set up a separate database for logging so its properties could be tuned differently; log data and transactional app data are different beasts. The sample app for this post is written against the Chinook sample database, so I named the log database Chinook.Logs. The primary table script follows, minus the indexes, SET statements and the like.
CREATE TABLE [dbo].[LogEvent](
	[LogId] [int] IDENTITY(1,1) NOT NULL,
	[LogDate] [datetime] NOT NULL,
	[EventLevel] [nvarchar](50) NOT NULL,
	[LoggerName] [nvarchar](500) NOT NULL,
	[UserName] [nvarchar](50) NULL,
	[Url] [nvarchar](1024) NULL,
	[MachineName] [nvarchar](100) NOT NULL,
	[SessionId] [nvarchar](100) NULL,
	[ThreadId] [int] NULL,
	[Referrer] [nvarchar](1024) NULL,
	[UserAgent] [nvarchar](500) NULL,
	[Code] [nvarchar](10) NULL,
	[LogMessage] [nvarchar](max) NOT NULL,
	[PartitionKey] [tinyint] NOT NULL,
PRIMARY KEY CLUSTERED 
(
	[LogId] ASC,
	[PartitionKey] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON)
)
The table is pretty straightforward, with perhaps a couple of exceptions. The Code field was to be used for custom error codes (HTTP or otherwise) or for "tagging" log statements. PartitionKey was added by our DBA mainly for archiving log data but also for efficient querying by day; more on this to come. Finally, I originally had the string columns as varchar, but we found that NLog sent nvarchar parameters, so a conversion was happening with each insert that had a slight performance impact.

The insert sproc grabs the day of the week and inserts that as the partition key and the rest of the data just passes through.
CREATE PROCEDURE [dbo].[sLogEvent_Insert]
	@time_stamp datetime,
	@level nvarchar(50),
	@logger nvarchar(500),
	@userName nvarchar(50),
	@url nvarchar(1024),
	@machineName nvarchar(100),
	@sessionId nvarchar(100),
	@threadId int,
	@referrer nvarchar(1024),
	@userAgent nvarchar(500),
	@code nvarchar(10),
	@message nvarchar(max)
AS
BEGIN
	SET NOCOUNT ON;
	Declare @currentDate Datetime
	declare @partitionKey tinyint
	
	set		@currentDate = getdate()
	set		@partitionKey = DATEPART(weekday, @currentDate)

	INSERT INTO [dbo].[LogEvent]
           ([LogDate]
           ,[EventLevel]
           ,[LoggerName]
           ,[UserName]
           ,[Url]
           ,[MachineName]
           ,[SessionId]
           ,[ThreadId]
           ,[Referrer]
           ,[UserAgent]
		   ,[Code]
           ,[LogMessage]
		   ,[PartitionKey])
     VALUES
           (@time_stamp
           ,@level
           ,@logger
           ,@userName
           ,@url
           ,@machineName
           ,@sessionId
           ,@threadId
           ,@referrer
           ,@userAgent
		   ,@code
           ,@message
		   ,@partitionKey);
END
An identical log table was created but with the name LogEvent_Switched. The cleanup sproc was created by our DBA:
CREATE PROCEDURE [DBA].[WeekdayPartitionCleanup_PartitionSwitching] 
AS
BEGIN
	SET NOCOUNT ON;
	declare @partitionKey int
	declare @SQLCommand nvarchar(1024)
	truncate table [Chinook.Logs].dbo.LogEvent_Switched;

	set @partitionKey = datePart(weekday, getdate()) + 1
	if(@partitionkey >7)
		set @partitionKey = 1

	set @SQLCommand = 'alter table [Chinook.Logs].dbo.LogEvent switch partition  ' 
	  + cast(@partitionKey as varchar) + ' to [Chinook.Logs].dbo.LogEvent_Switched;'
	exec sp_executesql @SQLCommand
END
That was scheduled to run daily. So if today is Friday, datePart(weekday, getdate()) returns 6; adding 1 gives 7 (Saturday), so all of last Saturday's log records get automagically switched over to the LogEvent_Switched table. This leaves the last 6 days of log records in LogEvent and the 7th day in LogEvent_Switched. If you don't have the Enterprise edition and can't use partitioning, regular deletes may work but could be problematic when deleting a large number of rows during frequent inserts or selects.

Finally permission to execute the sproc was granted and then it was back to .net land.
GRANT EXECUTE on dbo.sLogEvent_Insert to ChinookLogger;

Wiring up the Logging Code

First an interface that mostly mirrors NLog's Logger class.
namespace NLogSql.Web.Infrastructure.Diagnostics.Logging
{
    public interface ILog
    {
        void Debug(string format, params object[] args);
        void Error(string format, params object[] args);
        void Fatal(string format, params object[] args);
        void Info(string format, params object[] args);
        void Trace(string format, params object[] args);
        void Warn(string format, params object[] args);

        // custom
        void Write(LogType type, object properties, string message, params object[] args);

        bool IsDebugEnabled { get; }
        bool IsErrorEnabled { get; }
        bool IsFatalEnabled { get; }
        bool IsInfoEnabled { get; }
        bool IsTraceEnabled { get; }
        bool IsWarnEnabled { get; }
    }
}
The functionality of the Log class is provided mostly by inheriting from NLog's Logger class. One custom method is defined to allow passing custom log properties via an anonymous object, much like elsewhere in ASP.NET MVC (e.g. new { Code = "404" }).
using System.ComponentModel;
using System.Globalization;
using NLog;

namespace NLogSql.Web.Infrastructure.Diagnostics.Logging
{
    public class Log : Logger, ILog
    { 
        public void Write(LogType type, object properties, string message, 
			params object[] args)
        {
            var info = new LogEventInfo(LogLevel.FromOrdinal((int)type), Name, 
				CultureInfo.CurrentCulture, message, args);

            if (null != properties)
            {
                foreach (PropertyDescriptor propertyDescriptor 
					in TypeDescriptor.GetProperties(properties))
                {
                    var value = propertyDescriptor.GetValue(properties);
                    info.Properties[propertyDescriptor.Name] = value;
                }
            }

            Log(info);
        }
    }

    public enum LogType
    {
        Trace, Debug, Info, Warn, Error, Fatal
    }
}
A log instance is typically created via injecting ILog into the constructor of the class needing logging. Here I use Ninject to wire this up.
using System;
using System.Linq;
using System.Web.Mvc;
using NLog;
using NLogSql.Web.Infrastructure.Diagnostics.Logging;
using NLogSql.Web.Infrastructure.ErrorHandling;
using Ninject.Activation;
using Ninject.Modules;
using Ninject.Web.Mvc.FilterBindingSyntax;

namespace NLogSql.Web.DI
{
    public class DiagnosticsModule : NinjectModule
    {
        public override void Load()
        {
            Bind<ILog>().ToMethod(CreateLog);
            Bind<IErrorReporter>().To<ErrorReporter>();
            Bind<IEventLogWriter>().To<EventLogWriter>();
            Bind<IErrorEmailer>().To<ErrorEmailer>();

            Kernel.BindFilter<AppErrorHandlerAttribute>(FilterScope.Controller, 0);
        }

        private static ILog CreateLog(IContext ctx)
        {
            var p = ctx.Parameters.FirstOrDefault(x => 
				x.Name == LogConstants.LoggerNameParam);
            var loggerName = (null != p) ? p.GetValue(ctx, null).ToString() : null;

            if (string.IsNullOrWhiteSpace(loggerName))
            {
                if (null == ctx.Request.ParentRequest)
                {
                    throw new NullReferenceException(
                        "ParentRequest is null; unable to determine logger name; " 
						+ "if not injecting into a ctor a parameter for the "
						+ "logger name must be provided");
                }

                var service = ctx.Request.ParentRequest.Service;
                loggerName = service.FullName;
            }

            var log = (ILog)LogManager.GetLogger(loggerName, typeof(Log));
            return log;
        }
    }
}
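The module gets loaded when the Ninject kernel is created. With the standard Ninject.MVC3 bootstrapper that is roughly the following (a sketch; the actual bootstrap code isn't shown in this post):
private static void RegisterServices(IKernel kernel)
{
    // pull in the logging / error handling bindings defined above
    kernel.Load(new DiagnosticsModule());
    // ... other modules and bindings ...
}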
In some cases we can't easily do ctor injection - for example in the application class of Global.asax.cs or in a filter attribute. For these exceptional cases AppLogFactory is used to create an instance of ILog.
using System;
using System.Diagnostics;
using System.Web.Mvc;
using Ninject;
using Ninject.Parameters;

namespace NLogSql.Web.Infrastructure.Diagnostics.Logging
{
    /// <summary>
    /// Creates a log object for those instances where one cannot be injected (i.e. app startup).
    /// Generally you should just ctor inject ILog
    /// </summary>
    public static class AppLogFactory
    {
        public static ILog Create()
        {
            var declaringType = new StackTrace(1, false).GetFrame(1)
				.GetMethod().DeclaringType;

            if (declaringType != null)
            {
                var loggerName = declaringType.FullName;
                return Create(loggerName);
            }

            throw new InvalidOperationException(
				"Could not determine declaring type; specify logger name explicitly");
        }

        public static ILog Create<T>()
        {
            return Create(typeof(T).FullName);
        }

        public static ILog Create(string loggerName)
        {
            var log = Kernel.Get<ILog>(
				new ConstructorArgument(LogConstants.LoggerNameParam, loggerName));
            return log;
        }

        private static IKernel Kernel
        {
            get
            {
                return DependencyResolver.Current.GetService<IKernel>();
            }
        }
    }

    public class LogConstants
    {
        public const string LoggerNameParam = "loggerName";
    }
}
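As a usage sketch (assuming the Ninject kernel has already been registered by the time this runs), logging from Global.asax.cs might look like:
public class MvcApplication : System.Web.HttpApplication
{
    protected void Application_Start()
    {
        AreaRegistration.RegisterAllAreas();
        FilterConfig.RegisterGlobalFilters(GlobalFilters.Filters);
        RouteConfig.RegisterRoutes(RouteTable.Routes);

        // no ctor injection available here, so use the factory
        AppLogFactory.Create<MvcApplication>().Info("Application started");
    }
}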

Exercising the Log

First a simple test: injecting ILog into a controller ctor, ensuring it isn't null using the Ensure.That NuGet package, and logging a count around a database fetch.
public partial class HomeController : Controller
{
	private readonly IMappingService _mappingService;
	private readonly ILog _log;
	private readonly IMusicService _musicService;

	public HomeController(IMusicService musicService, ILog log, IMappingService mappingService)
	{
		_mappingService = Ensure.That(mappingService, "mappingService").IsNotNull().Value;
		_log = Ensure.That(log, "log").IsNotNull().Value;
		_musicService = Ensure.That(musicService, "musicService").IsNotNull().Value;
	}
	
	public virtual ActionResult Index()
	{
		_log.Debug("Retrieving genres");
		var genres = GetGenres().Result;
		_log.Info("Retrieved {0} music genres from the database", genres.Count);
		// ...
	}
}
To get a little more value, an action filter to log each controller action executed along with the execution time.
public class ActionTrackerAttribute : ActionFilterAttribute
{
	private Stopwatch Watch { get; set; }
	private ILog Log { get; set; }
	private ActionExecutingContext FilterContext { get; set; }

	private string ActionName
	{
		get { return FilterContext.ActionDescriptor.ActionName; }
	}

	private string ControllerName
	{
		get { return FilterContext.ActionDescriptor.ControllerDescriptor.ControllerName; }
	}

	private Uri Url
	{
		get { return FilterContext.RequestContext.HttpContext.Request.Url; }
	}

	public override void OnActionExecuting(ActionExecutingContext filterContext)
	{
		base.OnActionExecuting(filterContext);

		try
		{
			FilterContext = filterContext;
			Log = AppLogFactory.Create<ActionTrackerAttribute>();
			Log.Trace("Executing {0}.{1}", ControllerName, ActionName);
			Watch = Stopwatch.StartNew();
		}
		catch (Exception ex)
		{
			Trace.WriteLine(ex);
		}
	}

	public override void OnResultExecuted(ResultExecutedContext filterContext)
	{
		base.OnResultExecuted(filterContext);

		try
		{
			Watch.Stop();
			Log.Info("Executed {0}.{1} for {2} in {3:##0.000} second(s)", 
				ControllerName, ActionName, Url, Watch.Elapsed.TotalSeconds);
		}
		catch (Exception ex)
		{
			Trace.WriteLine(ex);
		}
	}
}
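The attribute can then be registered globally so every action gets timed, or applied per controller (a hypothetical registration):
// e.g. in FilterConfig.RegisterGlobalFilters:
filters.Add(new ActionTrackerAttribute());

// or per controller:
[ActionTracker]
public partial class HomeController : Controller { /* ... */ }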

Troubleshooting Failed Logging

When setting up logging initially or making changes later, logging might not work. Usually I'd first fire up SQL Server Profiler and watch for calls to the log insert sproc. If I didn't see them then I'd know it's probably a configuration issue. In the nlog tag in the config file I set throwExceptions="true" internalLogLevel="Debug" internalLogFile="${basedir}/NlogInternal.log" but in practice that rarely seemed to do anything; turning off async temporarily may further help troubleshoot.

First I'd check when creating the log if the log levels were enabled as expected from the config.
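For instance, a throwaway check like this (illustrative only) shows which levels the configuration actually enabled:
var log = AppLogFactory.Create("ConfigCheck");
System.Diagnostics.Debug.WriteLine("Trace={0} Debug={1} Info={2} Warn={3} Error={4} Fatal={5}",
    log.IsTraceEnabled, log.IsDebugEnabled, log.IsInfoEnabled,
    log.IsWarnEnabled, log.IsErrorEnabled, log.IsFatalEnabled);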


Next I'd check the configuration in code with a small class, also useful for changing config at runtime.
public class LogManagement
{
	public static AsyncTargetWrapper GetAsyncDbWrapperTarget()
	{
		var target = (AsyncTargetWrapper)LogManager.Configuration
			.FindTargetByName("asyncDbWrapperTarget");
		return target;
	}
}
From there I'd further inspect the configuration to see if everything looks okay.
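For example (a sketch; the properties shown are standard on NLog's AsyncTargetWrapper and DatabaseTarget):
var wrapper = LogManagement.GetAsyncDbWrapperTarget();
var dbTarget = (NLog.Targets.DatabaseTarget)wrapper.WrappedTarget;

// confirm the async and database settings actually loaded from config
System.Diagnostics.Debug.WriteLine("Queue limit {0}, batch size {1}, overflow {2}",
    wrapper.QueueLimit, wrapper.BatchSize, wrapper.OverflowAction);
System.Diagnostics.Debug.WriteLine("Connection string name: {0}",
    dbTarget.ConnectionStringName);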


If I did see the insert sproc call in SQL Server Profiler then I'd check DB setup and permissions and grab the SQL from Profiler...


... and paste it into a query window, then format the SQL (I use Poor Man's T-SQL Formatter) and execute to check for errors.


Evaluating Async Logging

Because I'm paranoid, OCD, detailed and curious, I wanted to verify the async behavior and compare it to non-async. On the home page I added a link to generate a batch of log records, corresponding to this controller action.
public virtual ActionResult BatchTest()
{
	var sw = Stopwatch.StartNew();
	const int count = 10000;
	for (var i = 0; i < count; i++)
	{
		_log.Trace("Testing {0}", i);
	}
	sw.Stop();
	var msg = string.Format("{0} log invokes in {1:##0.000} seconds", count, 
		sw.Elapsed.TotalSeconds);
	_log.Info(msg);

	return new ContentResult { Content = msg };
}
The async result:


And the non-async result:


Evaluating Log Queries

Usually when inspecting log records I'd select just those columns I'm interested in and order by LogId DESC.


Once enough log data was there it was helpful to query on the columns that would typically be filtered on and evaluate the execution plan and response times. From there indexes were added as needed for columns such as EventLevel, URL, UserName, etc. and execution times compared afterwards.

Preparing for Deployment

In Web.Release.config, NLog config transformations are performed to change the log level from Trace to Info, to remove the debugger output, and to tone down NLog internal issue reporting.
<nlog xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xdt:Transform="SetAttributes"
        throwExceptions="false" internalLogLevel="Warn" 
		internalLogFile="${basedir}/NlogInternal.log">
    <rules>
       <!-- Levels: Off, Trace, Debug, Info, Warn, Error, Fatal -->
       <logger name="*" minlevel="Info" writeTo="asyncDbWrapperTarget" 
	      xdt:Transform="SetAttributes" xdt:Locator="Match(name)" />
    </rules>
</nlog>
Slow Cheetah preview:
Note that removing the nlog xsd attribute in the root web.config (and all transforms) was needed for the transformation to work correctly. Also at one point I tried a replace transform to replace the entire nlog tag but found it didn't work correctly and it created too much duplicate content between configurations.

Some Queries


Everything today:
SELECT *
FROM LogEvent WITH (NOLOCK)
WHERE PartitionKey = (
		SELECT DATEPART(weekday, getdate())
		)
ORDER BY logid DESC;
Errors and warnings in the past 2 hours:
SELECT *
FROM LogEvent WITH (NOLOCK)
WHERE eventlevel IN (
		'Error'
		,'Warn'
		)
	AND DATEDIFF(hh, LogDate, GETDATE()) <= 2
ORDER BY logid DESC;
Errors by day and url:
SELECT cast(logdate AS DATE) [Day]
	,url
	,count(*) ErrorCount
FROM LogEvent WITH (NOLOCK)
WHERE eventlevel = 'Error'
	AND len(url) > 0
GROUP BY cast(logdate AS DATE)
	,url
ORDER BY Day DESC
	,Url;
Errors by logger name / class name / component area:
SELECT LoggerName
	,count(*) AS ErrorCount
FROM LogEvent WITH (NOLOCK)
WHERE EventLevel = 'Error'
GROUP BY LoggerName
ORDER BY count(*) DESC;
Unhandled exceptions today:
SELECT *
FROM LogEvent WITH (NOLOCK)
WHERE PartitionKey = (
		SELECT DATEPART(weekday, getdate())
		)
	AND eventlevel = 'Error'
	AND LoggerName IN ('NLogSql.Web.Infrastructure.ErrorHandling.IErrorReporter')
ORDER BY logid DESC;

Future Enhancements / Considerations

  • Web view to read, search, filter log maybe with some SignalR goodness
  • AppId type column if logging from multiple apps w/o desire for multiple log databases
  • Possibly a separate error table that the log table could link to for errors with the full error report split up

The Code

The code for this series is available at https://github.com/thnk2wn/NLogSql.Web
Thursday, May 3, 2012

Scripting NuGet Package Updates (ASP.NET MVC 4)

After creating an ASP.NET MVC 4 web app I noticed in the Visual Studio Package Manager that there were several NuGet packages that had updates available. My first thought was just to update them from the dialog while I was there. That has a few problems though:
  • You have to update packages one at a time
  • You cannot specify any of the options from the dialog that you could from the console
  • Updates will need to be performed again later, so why repeat a manual process?

Problems Updating Packages

Whether updating NuGet packages from the dialog or the Package Manager Console, there were a couple of issues I ran into:
  1. Modified files - I received some warnings such as "Skipping 'Scripts\jquery.validate.js' because it was modified" so I was not getting full updates. I did not actually modify any of the files NuGet indicated were modified but I guess Visual Studio touched them in some manner.
  2. No -reinstall or -force options - This relates to the modified files problem but at times it is necessary to uninstall and reinstall NuGet packages to update them correctly. I've requested a -reinstall option before and that is tracked as an issue to address. Additionally a -force option is coming to Update-Package in NuGet 1.9 so that could help later.
  3. Source control - We use TFS source control (insert look of pain here) and it freaked out at times when updating packages. For example:
    This was worked around by breaking up some checkins.
  4. Package dependency conflicts - Sometimes one package cannot be updated because another package depends on a different version of the same package. I ran into this problem trying to upgrade Json.Net but could not because a RavenDB package depended on an older version. This isn't something that can be automated per se but something to be aware of.

Package Awareness

There are a good number of packages used by default in an ASP.NET MVC 4 web app. I wanted to ensure I understood the packages in use and the dependencies between them. One tool that I had forgotten about is the NuGet Package Visualizer which came in handy for visualizing the relationships.

Creating a script

To address these items I created a PowerShell script to be included in the web project with functions that can be called from the Package Manager Console to automate some of the NuGet work.



The script below is a first pass that I am sure will be revised later, but it seems to be working so far. On CodePlex I previously saw a more generic PowerShell script to more forcibly update all packages listed in packages.config. I cannot seem to find it at the moment, but that is certainly a valid option. For now I want to be very explicit about each package to update.
# Functions to uninstall, install, re-install, and update NuGet packages used by this web app project

# Example usage from Visual Studio Package Manager Console:
#     cd "$(ProjectDir)\_Tools"  # replace $(ProjectDir)
#     . .\Update-Packages.ps1
#     dir function:
#     ReInstall-AllPackages

$updatePackagesProjectName = "MyApp.Web"

# The problem with Update-Package is there is currently no -Force option (coming in 1.9)
# which means errors like "Skipping 'Scripts\jquery.validate.js' because it was modified"
function ReInstall-AllPackages
{	
   	UnInstall-AllPackages
    Install-AllPackages
}

# Just doing outside-in at top level with -RemoveDependencies
#   See also: http://docs.nuget.org/docs/workflows/package-visualizer
function UnInstall-AllPackages
{
    Uninstall-Package -force -RemoveDependencies -ProjectName $updatePackagesProjectName System.Web.Providers 
    Uninstall-Package -force -RemoveDependencies -ProjectName $updatePackagesProjectName jQuery.Validation.Unobtrusive
    Uninstall-Package -force -RemoveDependencies -ProjectName $updatePackagesProjectName jQuery.Ajax.Unobtrusive
    Uninstall-Package -force -RemoveDependencies -ProjectName $updatePackagesProjectName jQuery.UI.Combined
    Uninstall-Package -force -RemoveDependencies -ProjectName $updatePackagesProjectName Microsoft.Web.Optimization
    Uninstall-Package -force -RemoveDependencies -ProjectName $updatePackagesProjectName AspNetWebApi
    Uninstall-Package -force -RemoveDependencies -ProjectName $updatePackagesProjectName knockoutjs
    Uninstall-Package -force -RemoveDependencies -ProjectName $updatePackagesProjectName AspNetMvc
    Uninstall-Package -force -RemoveDependencies -ProjectName $updatePackagesProjectName Modernizr    
}

# doing more inside-out here so we get the latest version of core things like jQuery
# various packages may work with much older version of some core libraries but want latest
# being more explicit here than is needed
#   See also: http://docs.nuget.org/docs/workflows/package-visualizer
function Install-AllPackages
{
    # jQuery related
    Install-Package -ProjectName $updatePackagesProjectName jQuery
    Install-Package -ProjectName $updatePackagesProjectName jQuery.Validation
    Install-Package -ProjectName $updatePackagesProjectName jQuery.Validation.Unobtrusive
    Install-Package -ProjectName $updatePackagesProjectName jQuery.Ajax.Unobtrusive
    Install-Package -ProjectName $updatePackagesProjectName jQuery.UI.Combined
    
    # SQL related
    Install-Package -ProjectName $updatePackagesProjectName System.Web.Providers.Core
    Install-Package -ProjectName $updatePackagesProjectName System.Web.Providers 
    
    # optimization (bundles etc.)
    Install-Package -ProjectName $updatePackagesProjectName -Pre Microsoft.Web.Optimization 
        
    # misc
    Install-Package -ProjectName $updatePackagesProjectName knockoutjs    
    Install-Package -ProjectName $updatePackagesProjectName Modernizr 
    
    # aspnet web api related
    Install-Package -ProjectName $updatePackagesProjectName System.Net.Http
    Install-Package -ProjectName $updatePackagesProjectName System.Net.Http.Formatting
    Install-Package -ProjectName $updatePackagesProjectName System.Web.Http.Common
    Install-Package -ProjectName $updatePackagesProjectName AspNetWebApi.Core
    Install-Package -ProjectName $updatePackagesProjectName AspNetWebApi
       
    # aspnet mvc
    # system.Web.http.common taken care of with web api above
    Install-Package -ProjectName $updatePackagesProjectName Microsoft.Web.Infrastructure
    Install-Package -ProjectName $updatePackagesProjectName AspNetRazor.Core
    Install-Package -ProjectName $updatePackagesProjectName AspNetMvc
}

function Update-AllPackages
{
    Update-Package -Project $updatePackagesProjectName
}